hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8b7174c5655353741ded63d1253adc06ea6c86d0
| 8,769
|
py
|
Python
|
tests/test_turn_based_multi_corridor.py
|
Leonardo767/Abmarl
|
9fada5447b09174c6a70b6032b4a8d08b66c4589
|
[
"Apache-2.0"
] | 7
|
2020-11-13T01:33:44.000Z
|
2021-03-05T14:30:34.000Z
|
tests/test_turn_based_multi_corridor.py
|
Leonardo767/Abmarl
|
9fada5447b09174c6a70b6032b4a8d08b66c4589
|
[
"Apache-2.0"
] | 91
|
2020-11-04T23:34:30.000Z
|
2021-06-08T17:18:00.000Z
|
tests/test_turn_based_multi_corridor.py
|
Leonardo767/Abmarl
|
9fada5447b09174c6a70b6032b4a8d08b66c4589
|
[
"Apache-2.0"
] | 6
|
2021-07-12T19:28:51.000Z
|
2022-03-01T00:50:02.000Z
|
import numpy as np
import pytest
from abmarl.sim.corridor import MultiCorridor as Corridor
from abmarl.managers import TurnBasedManager
def test_init():
sim = Corridor()
wrapped_sim = TurnBasedManager(sim)
assert wrapped_sim.sim == sim
assert wrapped_sim.agents == sim.agents
assert next(wrapped_sim.agent_order) == 'agent0'
assert next(wrapped_sim.agent_order) == 'agent1'
assert next(wrapped_sim.agent_order) == 'agent2'
assert next(wrapped_sim.agent_order) == 'agent3'
assert next(wrapped_sim.agent_order) == 'agent4'
def test_reset_and_step():
np.random.seed(24)
sim = TurnBasedManager(Corridor())
obs = sim.reset()
assert sim.sim.corridor[4].id == 'agent3'
assert sim.sim.corridor[5].id == 'agent4'
assert sim.sim.corridor[6].id == 'agent2'
assert sim.sim.corridor[7].id == 'agent1'
assert sim.sim.corridor[8].id == 'agent0'
assert sim.done_agents == set()
assert obs == {'agent0': {'left': [True], 'position': [8], 'right': [False]}}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent1': {'left': [True], 'position': [7], 'right': [False]}}
assert reward == {'agent1': 0}
assert done == {'agent1': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent2': {'left': [True], 'position': [6], 'right': [False]}}
assert reward == {'agent2': 0}
assert done == {'agent2': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent3': {'left': [False], 'position': [4], 'right': [True]}}
assert reward == {'agent3': 0}
assert done == {'agent3': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent4': {'left': [True], 'position': [5], 'right': [False]}}
assert reward == {'agent4': -2}
assert done == {'agent4': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {
'agent0': {'left': [True], 'position': [9], 'right': [False]},
'agent1': {'left': [True], 'position': [8], 'right': [False]}}
assert reward == {'agent0': 100, 'agent1': -1}
assert done == {'agent0': True, 'agent1': False, '__all__': False}
with pytest.raises(AssertionError):
sim.step({'agent0': Corridor.Actions.STAY})
obs, reward, done, info = sim.step({'agent1': Corridor.Actions.STAY})
assert obs == {'agent2': {'left': [True], 'position': [7], 'right': [True]}}
assert reward == {'agent2': -1,}
assert done == {'agent2': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.LEFT for agent_id in obs})
assert obs == {'agent3': {'left': [False], 'position': [4], 'right': [False]}}
assert reward == {'agent3': -5}
assert done == {'agent3': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.STAY for agent_id in obs})
assert obs == {'agent4': {'left': [False], 'position': [6], 'right': [True]}}
assert reward == {'agent4': -3}
assert done == {'agent4': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.LEFT for agent_id in obs})
assert obs == {'agent1': {'left': [True], 'position': [8], 'right': [False]}}
assert reward == {'agent1': -1}
assert done == {'agent1': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent2': {'left': [False], 'position': [7], 'right': [False]}}
assert reward == {'agent2': -5}
assert done == {'agent2': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent3': {'left': [False], 'position': [4], 'right': [True]}}
assert reward == {'agent3': -1}
assert done == {'agent3': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent4': {'left': [True], 'position': [5], 'right': [False]}}
assert reward == {'agent4': -3}
assert done == {'agent4': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.LEFT for agent_id in obs})
assert obs == {
'agent1': {'left': [True], 'position': [9], 'right': [False]},
'agent2': {'left': [False], 'position': [8], 'right': [False]}}
assert reward == {'agent1': 100, 'agent2': -1}
assert done == {'agent1': True, 'agent2': False, '__all__': False}
with pytest.raises(AssertionError):
sim.step({'agent1': Corridor.Actions.STAY})
obs, reward, done, info = sim.step({'agent2': Corridor.Actions.STAY})
assert obs == {'agent3': {'left': [False], 'position': [4], 'right': [True]}}
assert reward == {'agent3': -7,}
assert done == {'agent3': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.LEFT for agent_id in obs})
assert obs == {'agent4': {'left': [False], 'position': [5], 'right': [False]}}
assert reward == {'agent4': -5,}
assert done == {'agent4': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent2': {'left': [False], 'position': [8], 'right': [False]}}
assert reward == {'agent2': -1,}
assert done == {'agent2': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent3': {'left': [False], 'position': [3], 'right': [False]}}
assert reward == {'agent3': -1,}
assert done == {'agent3': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent4': {'left': [False], 'position': [6], 'right': [False]}}
assert reward == {'agent4': -1,}
assert done == {'agent4': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {
'agent2': {'left': [False], 'position': [9], 'right': [False]},
'agent3': {'left': [False], 'position': [4], 'right': [False]}}
assert reward == {'agent2': 100, 'agent3': -1}
assert done == {'agent2': True, 'agent3': False, '__all__': False}
with pytest.raises(AssertionError):
sim.step({'agent2': Corridor.Actions.STAY})
obs, reward, done, info = sim.step({'agent3': Corridor.Actions.RIGHT})
assert obs == {'agent4': {'left': [False], 'position': [7], 'right': [False]}}
assert reward == {'agent4': -1,}
assert done == {'agent4': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent3': {'left': [False], 'position': [5], 'right': [False]}}
assert reward == {'agent3': -1,}
assert done == {'agent3': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent4': {'left': [False], 'position': [8], 'right': [False]}}
assert reward == {'agent4': -1,}
assert done == {'agent4': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent3': {'left': [False], 'position': [6], 'right': [False]}}
assert reward == {'agent3': -1,}
assert done == {'agent3': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {
'agent4': {'left': [False], 'position': [9], 'right': [False]},
'agent3': {'left': [False], 'position': [7], 'right': [False]}}
assert reward == {'agent4': 100, 'agent3': -1}
assert done == {'agent4': True, 'agent3': False, '__all__': False}
with pytest.raises(AssertionError):
sim.step({'agent4': Corridor.Actions.STAY})
obs, reward, done, info = sim.step({'agent3': Corridor.Actions.RIGHT})
assert obs == {'agent3': {'left': [False], 'position': [8], 'right': [False]}}
assert reward == {'agent3': -1,}
assert done == {'agent3': False, '__all__': False}
obs, reward, done, info = sim.step({agent_id: Corridor.Actions.RIGHT for agent_id in obs})
assert obs == {'agent3': {'left': [False], 'position': [9], 'right': [False]}}
assert reward == {'agent3': 100,}
assert done == {'agent3': True, '__all__': True}
| 48.181319
| 94
| 0.604858
| 1,108
| 8,769
| 4.637184
| 0.055957
| 0.059946
| 0.065784
| 0.086026
| 0.874854
| 0.856559
| 0.777735
| 0.774231
| 0.761191
| 0.683729
| 0
| 0.026235
| 0.187137
| 8,769
| 181
| 95
| 48.447514
| 0.694585
| 0
| 0
| 0.468966
| 0
| 0
| 0.155434
| 0
| 0
| 0
| 0
| 0
| 0.662069
| 1
| 0.013793
| false
| 0
| 0.027586
| 0
| 0.041379
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8b7f868d3f7c27e3e3be7a0e999e3115461c5714
| 114
|
py
|
Python
|
Trakttv.bundle/Contents/Libraries/Shared/plugin/core/backup/tasks/__init__.py
|
disrupted/Trakttv.bundle
|
24712216c71f3b22fd58cb5dd89dad5bb798ed60
|
[
"RSA-MD"
] | 1,346
|
2015-01-01T14:52:24.000Z
|
2022-03-28T12:50:48.000Z
|
Trakttv.bundle/Contents/Libraries/Shared/plugin/core/backup/tasks/__init__.py
|
alcroito/Plex-Trakt-Scrobbler
|
4f83fb0860dcb91f860d7c11bc7df568913c82a6
|
[
"RSA-MD"
] | 474
|
2015-01-01T10:27:46.000Z
|
2022-03-21T12:26:16.000Z
|
Trakttv.bundle/Contents/Libraries/Shared/plugin/core/backup/tasks/__init__.py
|
alcroito/Plex-Trakt-Scrobbler
|
4f83fb0860dcb91f860d7c11bc7df568913c82a6
|
[
"RSA-MD"
] | 191
|
2015-01-02T18:27:22.000Z
|
2022-03-29T10:49:48.000Z
|
from plugin.core.backup.tasks.archive import ArchiveTask
from plugin.core.backup.tasks.compact import CompactTask
| 38
| 56
| 0.859649
| 16
| 114
| 6.125
| 0.625
| 0.204082
| 0.285714
| 0.408163
| 0.510204
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 114
| 2
| 57
| 57
| 0.924528
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8badb99acbe77ee93ccf5d2afbe378a855b01061
| 1,624
|
py
|
Python
|
Keras_tensorflow_nightly/source2.7/tensorflow/tools/api/generator/api/keras/metrics/__init__.py
|
Con-Mi/lambda-packs
|
b23a8464abdd88050b83310e1d0e99c54dac28ab
|
[
"MIT"
] | 3
|
2019-04-01T11:03:04.000Z
|
2019-12-31T02:17:15.000Z
|
Keras_tensorflow_nightly/source2.7/tensorflow/tools/api/generator/api/keras/metrics/__init__.py
|
Con-Mi/lambda-packs
|
b23a8464abdd88050b83310e1d0e99c54dac28ab
|
[
"MIT"
] | 1
|
2021-04-15T18:46:45.000Z
|
2021-04-15T18:46:45.000Z
|
Keras_tensorflow_nightly/source2.7/tensorflow/tools/api/generator/api/keras/metrics/__init__.py
|
Con-Mi/lambda-packs
|
b23a8464abdd88050b83310e1d0e99c54dac28ab
|
[
"MIT"
] | 1
|
2021-09-23T13:43:07.000Z
|
2021-09-23T13:43:07.000Z
|
"""Imports for Python API.
This file is MACHINE GENERATED! Do not edit.
Generated by: tensorflow/tools/api/generator/create_python_api.py script.
"""
from tensorflow.python.keras._impl.keras.losses import KLD as kullback_leibler_divergence
from tensorflow.python.keras._impl.keras.losses import MAE as mean_absolute_error
from tensorflow.python.keras._impl.keras.losses import MAPE as mean_absolute_percentage_error
from tensorflow.python.keras._impl.keras.losses import MSE as mean_squared_error
from tensorflow.python.keras._impl.keras.losses import MSLE as mean_squared_logarithmic_error
from tensorflow.python.keras._impl.keras.losses import binary_crossentropy
from tensorflow.python.keras._impl.keras.losses import categorical_crossentropy
from tensorflow.python.keras._impl.keras.losses import cosine as cosine_proximity
from tensorflow.python.keras._impl.keras.losses import hinge
from tensorflow.python.keras._impl.keras.losses import poisson
from tensorflow.python.keras._impl.keras.losses import sparse_categorical_crossentropy
from tensorflow.python.keras._impl.keras.losses import squared_hinge
from tensorflow.python.keras._impl.keras.metrics import binary_accuracy
from tensorflow.python.keras._impl.keras.metrics import categorical_accuracy
from tensorflow.python.keras._impl.keras.metrics import deserialize
from tensorflow.python.keras._impl.keras.metrics import get
from tensorflow.python.keras._impl.keras.metrics import serialize
from tensorflow.python.keras._impl.keras.metrics import sparse_top_k_categorical_accuracy
from tensorflow.python.keras._impl.keras.metrics import top_k_categorical_accuracy
| 67.666667
| 93
| 0.866995
| 233
| 1,624
| 5.83691
| 0.23176
| 0.195588
| 0.279412
| 0.349265
| 0.746324
| 0.746324
| 0.746324
| 0.738971
| 0.431618
| 0.198529
| 0
| 0
| 0.064655
| 1,624
| 24
| 94
| 67.666667
| 0.895326
| 0.088054
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
47742c33aedfdd4508d7eb82fdc9df7fa86c0e3e
| 53
|
py
|
Python
|
tests/test_d3heatmap.py
|
erdogant/d3heatmap
|
3755bcbea7893ee9487c1f7ae8212db0f1973ad4
|
[
"MIT"
] | 5
|
2021-06-18T19:46:06.000Z
|
2022-03-09T00:09:23.000Z
|
tests/test_d3heatmap.py
|
erdogant/d3heatmap
|
3755bcbea7893ee9487c1f7ae8212db0f1973ad4
|
[
"MIT"
] | null | null | null |
tests/test_d3heatmap.py
|
erdogant/d3heatmap
|
3755bcbea7893ee9487c1f7ae8212db0f1973ad4
|
[
"MIT"
] | 2
|
2021-04-05T22:38:56.000Z
|
2022-01-05T00:13:06.000Z
|
import d3heatmap as d3heatmap
def test_plot():
pass
| 13.25
| 29
| 0.792453
| 8
| 53
| 5.125
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044444
| 0.150943
| 53
| 4
| 30
| 13.25
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
47b4b61f04c7c20009bb9b3358aa44adcf41ebbe
| 127
|
py
|
Python
|
Algorithm/Sum Questions/67. Add Binary - Sol 2 O(n) TS.py
|
smsubham/Data-Structure-Algorithms-Questions
|
45da68231907068ef4e4a0444ffdac69b337fa7c
|
[
"Apache-2.0"
] | null | null | null |
Algorithm/Sum Questions/67. Add Binary - Sol 2 O(n) TS.py
|
smsubham/Data-Structure-Algorithms-Questions
|
45da68231907068ef4e4a0444ffdac69b337fa7c
|
[
"Apache-2.0"
] | null | null | null |
Algorithm/Sum Questions/67. Add Binary - Sol 2 O(n) TS.py
|
smsubham/Data-Structure-Algorithms-Questions
|
45da68231907068ef4e4a0444ffdac69b337fa7c
|
[
"Apache-2.0"
] | null | null | null |
#https://leetcode.com/problems/add-binary/
#https://leetcode.com/problems/add-binary/discuss/279879/Python-easy-to-understand
| 31.75
| 82
| 0.795276
| 18
| 127
| 5.611111
| 0.666667
| 0.257426
| 0.316832
| 0.475248
| 0.653465
| 0.653465
| 0
| 0
| 0
| 0
| 0
| 0.048387
| 0.023622
| 127
| 3
| 83
| 42.333333
| 0.766129
| 0.96063
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d02d8ae0b2389a0b84774d7588278f252543307d
| 1,879
|
py
|
Python
|
src/apps/tasks/migrations/0002_auto_20191203_1911.py
|
binfeng1018/competitions-v2
|
173ea6053b7eda5de3a9f1a687dfb0d43bfc4e9c
|
[
"Apache-2.0"
] | 19
|
2018-07-27T19:14:10.000Z
|
2021-12-08T16:34:42.000Z
|
src/apps/tasks/migrations/0002_auto_20191203_1911.py
|
binfeng1018/competitions-v2
|
173ea6053b7eda5de3a9f1a687dfb0d43bfc4e9c
|
[
"Apache-2.0"
] | 516
|
2017-07-27T15:45:43.000Z
|
2022-02-10T07:57:46.000Z
|
src/apps/tasks/migrations/0002_auto_20191203_1911.py
|
binfeng1018/competitions-v2
|
173ea6053b7eda5de3a9f1a687dfb0d43bfc4e9c
|
[
"Apache-2.0"
] | 16
|
2018-01-01T19:07:01.000Z
|
2021-09-17T07:59:59.000Z
|
# Generated by Django 2.1.11 on 2019-12-03 19:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tasks', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='solution',
name='chahub_data_hash',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='solution',
name='chahub_needs_retry',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='solution',
name='chahub_timestamp',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='solution',
name='deleted',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='solution',
name='is_public',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='solution',
name='md5',
field=models.CharField(blank=True, max_length=32, null=True),
),
migrations.AddField(
model_name='task',
name='chahub_data_hash',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='task',
name='chahub_needs_retry',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='task',
name='chahub_timestamp',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='task',
name='deleted',
field=models.BooleanField(default=False),
),
]
| 29.359375
| 73
| 0.548696
| 171
| 1,879
| 5.894737
| 0.298246
| 0.178571
| 0.228175
| 0.267857
| 0.810516
| 0.810516
| 0.810516
| 0.723214
| 0.723214
| 0.684524
| 0
| 0.018415
| 0.335285
| 1,879
| 63
| 74
| 29.825397
| 0.788631
| 0.024481
| 0
| 0.824561
| 1
| 0
| 0.113053
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017544
| 0
| 0.070175
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
d032448f50fd3a13785e1869875317d4aa3e2f51
| 17,019
|
py
|
Python
|
canvas_sdk/methods/appointment_groups.py
|
david-house-harvard/canvas_python_sdk
|
9c2e59621a9a5667bc43c253ef801482b241a2c1
|
[
"MIT"
] | 21
|
2015-06-12T13:49:04.000Z
|
2021-11-08T05:37:44.000Z
|
canvas_sdk/methods/appointment_groups.py
|
david-house-harvard/canvas_python_sdk
|
9c2e59621a9a5667bc43c253ef801482b241a2c1
|
[
"MIT"
] | 44
|
2015-02-04T15:26:52.000Z
|
2021-12-03T17:47:00.000Z
|
canvas_sdk/methods/appointment_groups.py
|
david-house-harvard/canvas_python_sdk
|
9c2e59621a9a5667bc43c253ef801482b241a2c1
|
[
"MIT"
] | 7
|
2015-07-20T23:56:03.000Z
|
2021-02-23T17:13:00.000Z
|
from canvas_sdk import client, utils
def list_appointment_groups(request_ctx, scope=None, context_codes=None, include_past_appointments=None, include=None, **request_kwargs):
"""
Retrieve the list of appointment groups that can be reserved or managed by
the current user.
:param request_ctx: The request context
:type request_ctx: :class:RequestContext
:param scope: (optional) Defaults to "reservable"
:type scope: string or None
:param context_codes: (optional) Array of context codes used to limit returned results.
:type context_codes: string or None
:param include_past_appointments: (optional) Defaults to false. If true, includes past appointment groups
:type include_past_appointments: boolean or None
:param include: (optional) Array of additional information to include. "appointments":: calendar event time slots for this appointment group "child_events":: reservations of those time slots "participant_count":: number of reservations "reserved_times":: the event id, start time and end time of reservations the current user has made)
:type include: string or None
:return: List appointment groups
:rtype: requests.Response (with void data)
"""
scope_types = ('reservable', 'manageable')
include_types = ('appointments', 'child_events', 'participant_count', 'reserved_times')
utils.validate_attr_is_acceptable(scope, scope_types)
utils.validate_attr_is_acceptable(include, include_types)
path = '/v1/appointment_groups'
payload = {
'scope' : scope,
'context_codes' : context_codes,
'include_past_appointments' : include_past_appointments,
'include' : include,
}
url = request_ctx.base_api_url + path.format()
response = client.get(request_ctx, url, payload=payload, **request_kwargs)
return response
def create_appointment_group(request_ctx, appointment_group_context_codes, appointment_group_sub_context_codes=None, appointment_group_title=None, appointment_group_description=None, appointment_group_location_name=None, appointment_group_location_address=None, appointment_group_publish=None, appointment_group_participants_per_appointment=None, appointment_group_min_appointments_per_participant=None, appointment_group_max_appointments_per_participant=None, appointment_group_new_appointments_X=None, appointment_group_participant_visibility=None, **request_kwargs):
"""
Create and return a new appointment group. If new_appointments are
specified, the response will return a new_appointments array (same format
as appointments array, see "List appointment groups" action)
:param request_ctx: The request context
:type request_ctx: :class:RequestContext
:param appointment_group_context_codes: (required) Array of context codes (courses, e.g. course_1) this group should be linked to (1 or more). Users in the course(s) with appropriate permissions will be able to sign up for this appointment group.
:type appointment_group_context_codes: string
:param appointment_group_sub_context_codes: (optional) Array of sub context codes (course sections or a single group category) this group should be linked to. Used to limit the appointment group to particular sections. If a group category is specified, students will sign up in groups and the participant_type will be "Group" instead of "User".
:type appointment_group_sub_context_codes: string or None
:param appointment_group_title: (optional) Short title for the appointment group.
:type appointment_group_title: string or None
:param appointment_group_description: (optional) Longer text description of the appointment group.
:type appointment_group_description: string or None
:param appointment_group_location_name: (optional) Location name of the appointment group.
:type appointment_group_location_name: string or None
:param appointment_group_location_address: (optional) Location address.
:type appointment_group_location_address: string or None
:param appointment_group_publish: (optional) Indicates whether this appointment group should be published (i.e. made available for signup). Once published, an appointment group cannot be unpublished. Defaults to false.
:type appointment_group_publish: boolean or None
:param appointment_group_participants_per_appointment: (optional) Maximum number of participants that may register for each time slot. Defaults to null (no limit).
:type appointment_group_participants_per_appointment: integer or None
:param appointment_group_min_appointments_per_participant: (optional) Minimum number of time slots a user must register for. If not set, users do not need to sign up for any time slots.
:type appointment_group_min_appointments_per_participant: integer or None
:param appointment_group_max_appointments_per_participant: (optional) Maximum number of time slots a user may register for.
:type appointment_group_max_appointments_per_participant: integer or None
:param appointment_group_new_appointments_X: (optional) Nested array of start time/end time pairs indicating time slots for this appointment group. Refer to the example request.
:type appointment_group_new_appointments_X: string or None
:param appointment_group_participant_visibility: (optional) "private":: participants cannot see who has signed up for a particular time slot "protected":: participants can see who has signed up. Defaults to "private".
:type appointment_group_participant_visibility: string or None
:return: Create an appointment group
:rtype: requests.Response (with void data)
"""
appointment_group_participant_visibility_types = ('private', 'protected')
utils.validate_attr_is_acceptable(appointment_group_participant_visibility, appointment_group_participant_visibility_types)
path = '/v1/appointment_groups'
payload = {
'appointment_group[context_codes]' : appointment_group_context_codes,
'appointment_group[sub_context_codes]' : appointment_group_sub_context_codes,
'appointment_group[title]' : appointment_group_title,
'appointment_group[description]' : appointment_group_description,
'appointment_group[location_name]' : appointment_group_location_name,
'appointment_group[location_address]' : appointment_group_location_address,
'appointment_group[publish]' : appointment_group_publish,
'appointment_group[participants_per_appointment]' : appointment_group_participants_per_appointment,
'appointment_group[min_appointments_per_participant]' : appointment_group_min_appointments_per_participant,
'appointment_group[max_appointments_per_participant]' : appointment_group_max_appointments_per_participant,
'appointment_group[new_appointments][X]' : appointment_group_new_appointments_X,
'appointment_group[participant_visibility]' : appointment_group_participant_visibility,
}
url = request_ctx.base_api_url + path.format()
response = client.post(request_ctx, url, payload=payload, **request_kwargs)
return response
def get_single_appointment_group(request_ctx, id, include=None, **request_kwargs):
"""
Returns information for a single appointment group
:param request_ctx: The request context
:type request_ctx: :class:RequestContext
:param id: (required) ID
:type id: string
:param include: (optional) Array of additional information to include. Ssee include[] argument of "List appointment groups" action. "child_events":: reservations of time slots time slots "appointments":: will always be returned
:type include: string or None
:return: Get a single appointment group
:rtype: requests.Response (with void data)
"""
include_types = ('child_events', 'appointments')
utils.validate_attr_is_acceptable(include, include_types)
path = '/v1/appointment_groups/{id}'
payload = {
'include' : include,
}
url = request_ctx.base_api_url + path.format(id=id)
response = client.get(request_ctx, url, payload=payload, **request_kwargs)
return response
def update_appointment_group(request_ctx, id, appointment_group_context_codes, appointment_group_sub_context_codes=None, appointment_group_title=None, appointment_group_description=None, appointment_group_location_name=None, appointment_group_location_address=None, appointment_group_publish=None, appointment_group_participants_per_appointment=None, appointment_group_min_appointments_per_participant=None, appointment_group_max_appointments_per_participant=None, appointment_group_new_appointments_X=None, appointment_group_participant_visibility=None, **request_kwargs):
    """
    Update an appointment group and return it. When new_appointments are
    supplied, the response carries a new_appointments array (same format as
    the appointments array of the "List appointment groups" action).

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param id: (required) ID of the appointment group
    :type id: string
    :param appointment_group_context_codes: (required) One or more context codes
        (courses, e.g. course_1) the group is linked to; users in those courses
        with appropriate permissions may sign up.
    :type appointment_group_context_codes: string
    :param appointment_group_sub_context_codes: (optional) Sub context codes
        (course sections or a single group category) limiting the group to
        particular sections; a group category makes participant_type "Group".
    :type appointment_group_sub_context_codes: string or None
    :param appointment_group_title: (optional) Short title for the group.
    :type appointment_group_title: string or None
    :param appointment_group_description: (optional) Longer text description.
    :type appointment_group_description: string or None
    :param appointment_group_location_name: (optional) Location name.
    :type appointment_group_location_name: string or None
    :param appointment_group_location_address: (optional) Location address.
    :type appointment_group_location_address: string or None
    :param appointment_group_publish: (optional) Whether to publish the group
        (make it available for signup); publishing cannot be undone. Defaults
        to false.
    :type appointment_group_publish: boolean or None
    :param appointment_group_participants_per_appointment: (optional) Maximum
        participants per time slot; defaults to null (no limit).
    :type appointment_group_participants_per_appointment: integer or None
    :param appointment_group_min_appointments_per_participant: (optional)
        Minimum time slots a user must register for; unset means none required.
    :type appointment_group_min_appointments_per_participant: integer or None
    :param appointment_group_max_appointments_per_participant: (optional)
        Maximum time slots a user may register for.
    :type appointment_group_max_appointments_per_participant: integer or None
    :param appointment_group_new_appointments_X: (optional) Nested array of
        start/end time pairs defining time slots for this group.
    :type appointment_group_new_appointments_X: string or None
    :param appointment_group_participant_visibility: (optional) "private" hides
        signups from other participants; "protected" shows them. Defaults to
        "private".
    :type appointment_group_participant_visibility: string or None
    :return: Update an appointment group
    :rtype: requests.Response (with void data)
    """
    # Only the two documented visibility values are accepted.
    utils.validate_attr_is_acceptable(
        appointment_group_participant_visibility, ('private', 'protected'))
    data = {
        'appointment_group[context_codes]': appointment_group_context_codes,
        'appointment_group[sub_context_codes]': appointment_group_sub_context_codes,
        'appointment_group[title]': appointment_group_title,
        'appointment_group[description]': appointment_group_description,
        'appointment_group[location_name]': appointment_group_location_name,
        'appointment_group[location_address]': appointment_group_location_address,
        'appointment_group[publish]': appointment_group_publish,
        'appointment_group[participants_per_appointment]': appointment_group_participants_per_appointment,
        'appointment_group[min_appointments_per_participant]': appointment_group_min_appointments_per_participant,
        'appointment_group[max_appointments_per_participant]': appointment_group_max_appointments_per_participant,
        'appointment_group[new_appointments][X]': appointment_group_new_appointments_X,
        'appointment_group[participant_visibility]': appointment_group_participant_visibility,
    }
    url = request_ctx.base_api_url + '/v1/appointment_groups/{id}'.format(id=id)
    return client.put(request_ctx, url, payload=data, **request_kwargs)
def delete_appointment_group(request_ctx, id, cancel_reason=None, **request_kwargs):
    """
    Delete an appointment group (along with its time slots and reservations)
    and return the deleted group.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param id: (required) ID of the appointment group
    :type id: string
    :param cancel_reason: (optional) Reason for deleting/canceling the appointment group.
    :type cancel_reason: string or None
    :return: Delete an appointment group
    :rtype: requests.Response (with void data)
    """
    url = request_ctx.base_api_url + '/v1/appointment_groups/{id}'.format(id=id)
    return client.delete(request_ctx, url,
                         payload={'cancel_reason': cancel_reason},
                         **request_kwargs)
def list_user_participants(request_ctx, id, registration_status=None, **request_kwargs):
    """
    List users that are (or may be) participating in this appointment group.
    Refer to the Users API for the response fields. Returns no results for
    appointment groups with the "Group" participant_type.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param id: (required) ID
    :type id: string
    :param registration_status: (optional) Limits results to a given participation status, defaults to "all"
    :type registration_status: string or None
    :return: List user participants
    :rtype: requests.Response (with void data)
    """
    # BUG FIX: the tuple previously listed 'registered' twice and omitted
    # 'unregistered', so the documented "unregistered" filter was rejected.
    # Allowed values per the Canvas appointment-groups API: all, registered,
    # unregistered.
    registration_status_types = ('all', 'registered', 'unregistered')
    utils.validate_attr_is_acceptable(registration_status, registration_status_types)
    path = '/v1/appointment_groups/{id}/users'
    payload = {
        'registration_status' : registration_status,
    }
    url = request_ctx.base_api_url + path.format(id=id)
    response = client.get(request_ctx, url, payload=payload, **request_kwargs)
    return response
def list_student_group_participants(request_ctx, id, registration_status=None, **request_kwargs):
    """
    List student groups that are (or may be) participating in this appointment
    group. Refer to the Groups API for the response fields. Returns no results
    for appointment groups with the "User" participant_type.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param id: (required) ID
    :type id: string
    :param registration_status: (optional) Limits results to a given participation status, defaults to "all"
    :type registration_status: string or None
    :return: List student group participants
    :rtype: requests.Response (with void data)
    """
    # BUG FIX: the tuple previously listed 'registered' twice and omitted
    # 'unregistered', so the documented "unregistered" filter was rejected.
    # Allowed values per the Canvas appointment-groups API: all, registered,
    # unregistered.
    registration_status_types = ('all', 'registered', 'unregistered')
    utils.validate_attr_is_acceptable(registration_status, registration_status_types)
    path = '/v1/appointment_groups/{id}/groups'
    payload = {
        'registration_status' : registration_status,
    }
    url = request_ctx.base_api_url + path.format(id=id)
    response = client.get(request_ctx, url, payload=payload, **request_kwargs)
    return response
| 62.340659
| 573
| 0.761149
| 2,106
| 17,019
| 5.881292
| 0.095916
| 0.204101
| 0.040691
| 0.035524
| 0.880107
| 0.868481
| 0.85306
| 0.852172
| 0.848216
| 0.835298
| 0
| 0.000782
| 0.172983
| 17,019
| 272
| 574
| 62.569853
| 0.879218
| 0.56766
| 0
| 0.708333
| 0
| 0
| 0.200589
| 0.162325
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072917
| false
| 0
| 0.010417
| 0
| 0.15625
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d0476ac6ea8d4f30143c0dbff2409edda5c03fe4
| 8,509
|
py
|
Python
|
ample/util/tests/test_reference_manager.py
|
fsimkovic/ample
|
c3c2196ca292e831e3cd8d15e3d3079bb6609848
|
[
"BSD-3-Clause"
] | 6
|
2017-03-17T14:43:14.000Z
|
2021-08-06T07:07:14.000Z
|
ample/util/tests/test_reference_manager.py
|
fsimkovic/ample
|
c3c2196ca292e831e3cd8d15e3d3079bb6609848
|
[
"BSD-3-Clause"
] | 47
|
2017-03-17T14:37:09.000Z
|
2021-01-28T10:22:15.000Z
|
ample/util/tests/test_reference_manager.py
|
fsimkovic/ample
|
c3c2196ca292e831e3cd8d15e3d3079bb6609848
|
[
"BSD-3-Clause"
] | 6
|
2017-09-26T08:45:09.000Z
|
2020-03-19T14:26:49.000Z
|
"""Test functions for util.ample_util"""
import sys
import unittest
from ample.util import reference_manager
class Test(unittest.TestCase):
    # Tests for reference_manager.ReferenceManager: checks that the citation
    # list is rendered correctly as plain text and as HTML.
    def test_construct_references(self):
        """Build a ReferenceManager from default options and verify its citation output."""
        # import argparse
        from ample.util import config_util, argparse_util
        # Minimal option set: a ReferenceManager only needs the parsed options dict.
        options = config_util.AMPLEConfigOptions()
        argso = argparse_util.process_command_line(args=['-mtz', 'foo', '-fasta', 'bar'])
        options.populate(argso)
        refMgr = reference_manager.ReferenceManager(options.d)
        # Expected full citation text for a default run (Unix line endings).
        ref_references = '* Bibby et al. (2012). AMPLE: A cluster-and-truncate approach to solve the crystal structures of small proteins using rapidly computed ab initio models. Acta Crystallogr. Sect. D Biol. Crystallogr. 68(12), 1622-1631. [doi:10.1107/S0907444912039194]\n\n* Winn et al. (2011). Overview of the CCP4 suite and current developments. Acta Crystallographica Section D 67(4), 235-242. [doi:10.1107/S0907444910045749]\n\n* Thomas et al. (2015). Routine phasing of coiled-coil protein crystal structures with AMPLE. IUCrJ 2(2), 198-206. [doi:10.1107/S2052252515002080]\n\n* Simkovic et al. (2016). Residue contacts predicted by evolutionary covariance extend the application of ab initio molecular replacement to larger and more challenging protein folds. IUCrJ 3(4), 259-270. [doi:10.1107/S2052252516008113]\n\n* Bradley et al. (2005). Toward High-Resolution de Novo Structure Prediction for Small Proteins. Science 309(5742), 1868-1871. [doi:10.1126/science.1113801]\n\n* Grosse-Kunstleve et al. (2002). The Computational Crystallography Toolbox: crystallographic algorithms in a reusable software framework. Journal of Applied Crystallography 35(1), 126-136. [doi:10.1107/S0021889801017824]\n\n* Theobald et al. (2006). THESEUS: maximum likelihood superpositioning and analysis of macromolecular structures. Bioinformatics 22(17), 2171-2172. [doi:10.1093/bioinformatics/btl332]\n\n* Krissinel et al. (2012). Enhanced fold recognition using efficient short fragment clustering. Journal of molecular biochemistry 1(2), 76-85. [doi:]\n\n* Zhang et al. (2004). SPICKER: A clustering approach to identify near-native protein folds. Journal of Computational Chemistry 25(6), 865-871. [doi:10.1002/jcc.20011]\n\n* Keegan et al. (2018). Recent developments in MrBUMP: better search-model preparation, graphical interaction with search models, and solution improvement and assessment. Acta Crystallographica Section D 74(3), 167-182. [doi:10.1107/S2059798318003455]\n\n* Murshudov et al. (1997). Refinement of macromolecular structures by the maximum-likelihood method. Acta Crystallogr. Sect. D Biol. Crystallogr. 53(3), 240-255. [doi:10.1107/S0907444996012255]\n\n* Thorn et al. (2013). Extending molecular-replacement solutions with SHELXE. Acta Crystallogr. Sect. D Biol. Crystallogr. 69(11), 2251-2256. [doi:10.1107/S0907444913027534]\n\n* Cohen et al. (2008). ARP/wARP and molecular replacement: the next generation. Acta Crystallogr. Sect. D Biol. Crystallogr. 64(1), 49-60. [doi:10.1107/S0907444907047580]\n\n'
        # Same expected text with Windows (CRLF) line endings.
        ref_references_windows = '* Bibby et al. (2012). AMPLE: A cluster-and-truncate approach to solve the crystal structures of small proteins using rapidly computed ab initio models. Acta Crystallogr. Sect. D Biol. Crystallogr. 68(12), 1622-1631. [doi:10.1107/S0907444912039194]\r\n\r\n* Winn et al. (2011). Overview of the CCP4 suite and current developments. Acta Crystallographica Section D 67(4), 235-242. [doi:10.1107/S0907444910045749]\r\n\r\n* Thomas et al. (2015). Routine phasing of coiled-coil protein crystal structures with AMPLE. IUCrJ 2(2), 198-206. [doi:10.1107/S2052252515002080]\r\n\r\n* Simkovic et al. (2016). Residue contacts predicted by evolutionary covariance extend the application of ab initio molecular replacement to larger and more challenging protein folds. IUCrJ 3(4), 259-270. [doi:10.1107/S2052252516008113]\r\n\r\n* Bradley et al. (2005). Toward High-Resolution de Novo Structure Prediction for Small Proteins. Science 309(5742), 1868-1871. [doi:10.1126/science.1113801]\r\n\r\n* Grosse-Kunstleve et al. (2002). The Computational Crystallography Toolbox: crystallographic algorithms in a reusable software framework. Journal of Applied Crystallography 35(1), 126-136. [doi:10.1107/S0021889801017824]\r\n\r\n* Theobald et al. (2006). THESEUS: maximum likelihood superpositioning and analysis of macromolecular structures. Bioinformatics 22(17), 2171-2172. [doi:10.1093/bioinformatics/btl332]\r\n\r\n* Krissinel et al. (2012). Enhanced fold recognition using efficient short fragment clustering. Journal of molecular biochemistry 1(2), 76-85. [doi:]\r\n\r\n* Zhang et al. (2004). SPICKER: A clustering approach to identify near-native protein folds. Journal of Computational Chemistry 25(6), 865-871. [doi:10.1002/jcc.20011]\r\n\r\n* Keegan et al. (2018). Recent developments in MrBUMP: better search-model preparation, graphical interaction with search models, and solution improvement and assessment. Acta Crystallographica Section D 74(3), 167-182. [doi:10.1107/S2059798318003455]\r\n\r\n* Murshudov et al. (1997). Refinement of macromolecular structures by the maximum-likelihood method. Acta Crystallogr. Sect. D Biol. Crystallogr. 53(3), 240-255. [doi:10.1107/S0907444996012255]\r\n\r\n* Thorn et al. (2013). Extending molecular-replacement solutions with SHELXE. Acta Crystallogr. Sect. D Biol. Crystallogr. 69(11), 2251-2256. [doi:10.1107/S0907444913027534]\r\n\r\n* Cohen et al. (2008). ARP/wARP and molecular replacement: the next generation. Acta Crystallogr. Sect. D Biol. Crystallogr. 64(1), 49-60. [doi:10.1107/S0907444907047580]\r\n\r\n'
        # We may not run (e.g.) arpwarp, so need to be tolerant of missing citations.
        # index() raises ValueError if the text is absent, so >= 0 means "found".
        if sys.platform.startswith("win"):
            self.assertGreaterEqual(ref_references_windows.index(refMgr.citation_list_as_text), 0)
        else:
            self.assertGreaterEqual(ref_references.index(refMgr.citation_list_as_text), 0)
        # Flip on optional pipeline stages (NMR input, transmembrane mode,
        # SCWRL) and disable MR, then check the HTML rendering of citations.
        options.d['nmr_model_in'] = 'foo'
        options.d['transmembrane'] = True
        options.d['use_scwrl'] = True
        options.d['do_mr'] = False
        refMgr = reference_manager.ReferenceManager(options.d)
        # Expected HTML ordered list of citations for the modified option set.
        ref_references = '<h3>References</h3><ol><li> Bibby et al. (2012). AMPLE: A cluster-and-truncate approach to solve the crystal structures of small proteins using rapidly computed ab initio models. Acta Crystallogr. Sect. D Biol. Crystallogr. 68(12), 1622-1631. [doi:10.1107/S0907444912039194]</li><li> Winn et al. (2011). Overview of the CCP4 suite and current developments. Acta Crystallographica Section D 67(4), 235-242. [doi:10.1107/S0907444910045749]</li><li> Thomas et al. (2015). Routine phasing of coiled-coil protein crystal structures with AMPLE. IUCrJ 2(2), 198-206. [doi:10.1107/S2052252515002080]</li><li> Simkovic et al. (2016). Residue contacts predicted by evolutionary covariance extend the application of ab initio molecular replacement to larger and more challenging protein folds. IUCrJ 3(4), 259-270. [doi:10.1107/S2052252516008113]</li><li> Bradley et al. (2005). Toward High-Resolution de Novo Structure Prediction for Small Proteins. Science 309(5742), 1868-1871. [doi:10.1126/science.1113801]</li><li> Bibby et al. (2013). Application of the AMPLE cluster-and-truncate approach to NMR structures for molecular replacement. Acta Crystallogr. Sect. D Biol. Crystallogr. 69(11), 2194-2201. [doi:10.1107/S0907444913018453]</li><li> Thomas et al. (2017). Approaches to ab initio molecular replacement of alpha-helical transmembrane proteins. Acta Crystallographica Section D 73(12), 985-996. [doi:10.1107/S2059798317016436]</li><li> Grosse-Kunstleve et al. (2002). The Computational Crystallography Toolbox: crystallographic algorithms in a reusable software framework. Journal of Applied Crystallography 35(1), 126-136. [doi:10.1107/S0021889801017824]</li><li> Theobald et al. (2006). THESEUS: maximum likelihood superpositioning and analysis of macromolecular structures. Bioinformatics 22(17), 2171-2172. [doi:10.1093/bioinformatics/btl332]</li><li> Krissinel et al. (2012). Enhanced fold recognition using efficient short fragment clustering. Journal of molecular biochemistry 1(2), 76-85. [doi:]</li><li> Krivov et al. (2009). Improved prediction of protein side-chain conformations with SCWRL4. Proteins: Struct., Funct., Bioinf. 77(4), 778-795. [doi:10.1002/prot.22488]</li></ol>'
        self.assertEqual(refMgr.citations_as_html, ref_references)
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()
| 223.921053
| 2,587
| 0.762369
| 1,260
| 8,509
| 5.116667
| 0.242857
| 0.022956
| 0.0349
| 0.008066
| 0.82581
| 0.818675
| 0.818675
| 0.809369
| 0.785016
| 0.785016
| 0
| 0.156544
| 0.125397
| 8,509
| 37
| 2,588
| 229.972973
| 0.709755
| 0.014925
| 0
| 0.08
| 0
| 0.12
| 0.872716
| 0.162746
| 0
| 0
| 0
| 0
| 0.12
| 1
| 0.04
| false
| 0
| 0.16
| 0
| 0.24
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d059037fcf74aa6b62ae2ba874e34bd2f9b5d844
| 777,868
|
py
|
Python
|
venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_wireless_controller_wtp_profile.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 1
|
2020-01-22T13:11:23.000Z
|
2020-01-22T13:11:23.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_wireless_controller_wtp_profile.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 12
|
2020-02-21T07:24:52.000Z
|
2020-04-14T09:54:32.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_wireless_controller_wtp_profile.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019-2020 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# Force new-style classes when this module runs under Python 2
# (Ansible modules support both interpreters).
__metaclass__ = type
# Standard Ansible module metadata: development status, maintainer group,
# and metadata-format version.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_wireless_controller_wtp_profile
short_description: Configure WTP profiles or FortiAP profiles that define radio settings for manageable FortiAP platforms in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify wireless_controller feature and wtp_profile category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.0
version_added: "2.10"
author:
- Link Zheng (@chillancezen)
- Jie Xue (@JieX19)
- Hongbin Lu (@fgtdev-hblu)
- Frank Shen (@frankshen01)
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Legacy fortiosapi has been deprecated, httpapi is the preferred way to run playbooks
requirements:
- ansible>=2.9.0
options:
access_token:
description:
- Token-based authentication.
Generated from GUI of Fortigate.
type: str
required: false
enable_log:
description:
- Enable/Disable logging for task.
type: bool
required: false
default: false
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
member_path:
type: str
description:
- Member attribute path to operate on.
- Delimited by a slash character if there are more than one attribute.
- Parameter marked with member_path is legitimate for doing member operation.
member_state:
type: str
description:
- Add or delete a member under specified attribute path.
- When member_state is specified, the state option is ignored.
choices:
- present
- absent
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
wireless_controller_wtp_profile:
description:
- Configure WTP profiles or FortiAP profiles that define radio settings for manageable FortiAP platforms.
default: null
type: dict
suboptions:
allowaccess:
description:
- Control management access to the managed WTP, FortiAP, or AP. Separate entries with a space.
type: list
choices:
- telnet
- http
- https
- ssh
- snmp
ap_country:
description:
- Country in which this WTP, FortiAP or AP will operate .
type: str
choices:
- NA
- AL
- DZ
- AO
- AR
- AM
- AU
- AT
- AZ
- BH
- BD
- BB
- BY
- BE
- BZ
- BO
- BA
- BR
- BN
- BG
- KH
- CL
- CN
- CO
- CR
- HR
- CY
- CZ
- DK
- DO
- EC
- EG
- SV
- EE
- FI
- FR
- GE
- DE
- GR
- GL
- GD
- GU
- GT
- HT
- HN
- HK
- HU
- IS
- IN
- ID
- IR
- IE
- IL
- IT
- JM
- JO
- KZ
- KE
- KP
- KR
- KW
- LV
- LB
- LI
- LT
- LU
- MO
- MK
- MY
- MT
- MX
- MC
- MA
- MZ
- MM
- NP
- NL
- AN
- AW
- NZ
- NO
- OM
- PK
- PA
- PG
- PY
- PE
- PH
- PL
- PT
- PR
- QA
- RO
- RU
- RW
- SA
- RS
- ME
- SG
- SK
- SI
- ZA
- ES
- LK
- SE
- SD
- CH
- SY
- TW
- TZ
- TH
- TT
- TN
- TR
- AE
- UA
- GB
- US
- PS
- UY
- UZ
- VE
- VN
- YE
- ZB
- ZW
- JP
- CA
- CF
- BS
- BF
- KY
- CX
- GH
- GY
- CI
- MW
- MV
- FM
- KN
- LC
- VC
- SN
- TM
- TC
- UG
- VU
- AF
- AS
- BJ
- BM
- BT
- BW
- CM
- TD
- CG
- CD
- DM
- ET
- GF
- PF
- FO
- FJ
- GI
- GP
- IQ
- IM
- LA
- LS
- LY
- MG
- ML
- MH
- MQ
- MR
- MU
- YT
- MD
- NI
- NE
- MP
- PW
- RE
- BL
- MF
- PM
- SL
- SR
- TG
- VI
- WF
- ZM
- --
ap_handoff:
description:
- Enable/disable AP handoff of clients to other APs .
type: str
choices:
- enable
- disable
apcfg_profile:
description:
- AP local configuration profile name. Source wireless-controller.apcfg-profile.name.
type: str
ble_profile:
description:
- Bluetooth Low Energy profile name. Source wireless-controller.ble-profile.name.
type: str
comment:
description:
- Comment.
type: str
console_login:
description:
- Enable/disable FAP console login access .
type: str
choices:
- enable
- disable
control_message_offload:
description:
- Enable/disable CAPWAP control message data channel offload.
type: list
choices:
- ebp-frame
- aeroscout-tag
- ap-list
- sta-list
- sta-cap-list
- stats
- aeroscout-mu
- sta-health
- spectral-analysis
deny_mac_list:
description:
- List of MAC addresses that are denied access to this WTP, FortiAP, or AP.
type: list
suboptions:
id:
description:
- ID.
required: true
type: int
mac:
description:
- A WiFi device with this MAC address is denied access to this WTP, FortiAP or AP.
type: str
dtls_in_kernel:
description:
- Enable/disable data channel DTLS in kernel.
type: str
choices:
- enable
- disable
dtls_policy:
description:
- WTP data channel DTLS policy .
type: list
choices:
- clear-text
- dtls-enabled
- ipsec-vpn
energy_efficient_ethernet:
description:
- Enable/disable use of energy efficient Ethernet on WTP.
type: str
choices:
- enable
- disable
esl_ses_dongle:
description:
- ESL SES-imagotag dongle configuration.
type: dict
suboptions:
apc_addr_type:
description:
- ESL SES-imagotag APC address type .
type: str
choices:
- fqdn
- ip
apc_fqdn:
description:
- FQDN of ESL SES-imagotag Access Point Controller (APC).
type: str
apc_ip:
description:
- IP address of ESL SES-imagotag Access Point Controller (APC).
type: str
apc_port:
description:
- Port of ESL SES-imagotag Access Point Controller (APC).
type: int
coex_level:
description:
- ESL SES-imagotag dongle coexistence level .
type: str
choices:
- none
compliance_level:
description:
- Compliance levels for the ESL solution integration .
type: str
choices:
- compliance-level-2
esl_channel:
description:
- ESL SES-imagotag dongle channel .
type: str
choices:
- -1
- 0
- 1
- 2
- 3
- 4
- 5
- 6
- 7
- 8
- 9
- 10
- 127
output_power:
description:
- ESL SES-imagotag dongle output power .
type: str
choices:
- a
- b
- c
- d
- e
- f
- g
- h
scd_enable:
description:
- Enable/disable ESL SES-imagotag Serial Communication Daemon (SCD) .
type: str
choices:
- enable
- disable
tls_cert_verification:
description:
- Enable/disable TLS Certificate verification. .
type: str
choices:
- enable
- disable
tls_fqdn_verification:
description:
- Enable/disable TLS Certificate verification. .
type: str
choices:
- enable
- disable
ext_info_enable:
description:
- Enable/disable station/VAP/radio extension information.
type: str
choices:
- enable
- disable
frequency_handoff:
description:
- Enable/disable frequency handoff of clients to other channels .
type: str
choices:
- enable
- disable
handoff_roaming:
description:
- Enable/disable client load balancing during roaming to avoid roaming delay .
type: str
choices:
- enable
- disable
handoff_rssi:
description:
- Minimum received signal strength indicator (RSSI) value for handoff (20 - 30).
type: int
handoff_sta_thresh:
description:
- Threshold value for AP handoff.
type: int
indoor_outdoor_deployment:
description:
- Set to allow indoor/outdoor-only channels under regulatory rules .
type: str
choices:
- platform-determined
- outdoor
- indoor
ip_fragment_preventing:
description:
- Select how to prevent IP fragmentation for CAPWAP tunneled control and data packets .
type: list
choices:
- tcp-mss-adjust
- icmp-unreachable
lan:
description:
- WTP LAN port mapping.
type: dict
suboptions:
port_esl_mode:
description:
- ESL port mode.
type: str
choices:
- offline
- nat-to-wan
- bridge-to-wan
- bridge-to-ssid
port_esl_ssid:
description:
- Bridge ESL port to SSID. Source system.interface.name.
type: str
port_mode:
description:
- LAN port mode.
type: str
choices:
- offline
- nat-to-wan
- bridge-to-wan
- bridge-to-ssid
port_ssid:
description:
- Bridge LAN port to SSID. Source wireless-controller.vap.name.
type: str
port1_mode:
description:
- LAN port 1 mode.
type: str
choices:
- offline
- nat-to-wan
- bridge-to-wan
- bridge-to-ssid
port1_ssid:
description:
- Bridge LAN port 1 to SSID. Source wireless-controller.vap.name.
type: str
port2_mode:
description:
- LAN port 2 mode.
type: str
choices:
- offline
- nat-to-wan
- bridge-to-wan
- bridge-to-ssid
port2_ssid:
description:
- Bridge LAN port 2 to SSID. Source wireless-controller.vap.name.
type: str
port3_mode:
description:
- LAN port 3 mode.
type: str
choices:
- offline
- nat-to-wan
- bridge-to-wan
- bridge-to-ssid
port3_ssid:
description:
- Bridge LAN port 3 to SSID. Source wireless-controller.vap.name.
type: str
port4_mode:
description:
- LAN port 4 mode.
type: str
choices:
- offline
- nat-to-wan
- bridge-to-wan
- bridge-to-ssid
port4_ssid:
description:
- Bridge LAN port 4 to SSID. Source wireless-controller.vap.name.
type: str
port5_mode:
description:
- LAN port 5 mode.
type: str
choices:
- offline
- nat-to-wan
- bridge-to-wan
- bridge-to-ssid
port5_ssid:
description:
- Bridge LAN port 5 to SSID. Source wireless-controller.vap.name.
type: str
port6_mode:
description:
- LAN port 6 mode.
type: str
choices:
- offline
- nat-to-wan
- bridge-to-wan
- bridge-to-ssid
port6_ssid:
description:
- Bridge LAN port 6 to SSID. Source wireless-controller.vap.name.
type: str
port7_mode:
description:
- LAN port 7 mode.
type: str
choices:
- offline
- nat-to-wan
- bridge-to-wan
- bridge-to-ssid
port7_ssid:
description:
- Bridge LAN port 7 to SSID. Source wireless-controller.vap.name.
type: str
port8_mode:
description:
- LAN port 8 mode.
type: str
choices:
- offline
- nat-to-wan
- bridge-to-wan
- bridge-to-ssid
port8_ssid:
description:
- Bridge LAN port 8 to SSID. Source wireless-controller.vap.name.
type: str
lbs:
description:
- Set various location based service (LBS) options.
type: dict
suboptions:
aeroscout:
description:
- Enable/disable AeroScout Real Time Location Service (RTLS) support .
type: str
choices:
- enable
- disable
aeroscout_ap_mac:
description:
- Use BSSID or board MAC address as AP MAC address in AeroScout AP messages .
type: str
choices:
- bssid
- board-mac
aeroscout_mmu_report:
description:
- Enable/disable compounded AeroScout tag and MU report .
type: str
choices:
- enable
- disable
aeroscout_mu:
description:
- Enable/disable AeroScout Mobile Unit (MU) support .
type: str
choices:
- enable
- disable
aeroscout_mu_factor:
description:
- AeroScout MU mode dilution factor .
type: int
aeroscout_mu_timeout:
description:
- AeroScout MU mode timeout (0 - 65535 sec).
type: int
aeroscout_server_ip:
description:
- IP address of AeroScout server.
type: str
aeroscout_server_port:
description:
- AeroScout server UDP listening port.
type: int
ekahau_blink_mode:
description:
- Enable/disable Ekahau blink mode (now known as AiRISTA Flow) to track and locate WiFi tags .
type: str
choices:
- enable
- disable
ekahau_tag:
description:
- WiFi frame MAC address or WiFi Tag.
type: str
erc_server_ip:
description:
- IP address of Ekahau RTLS Controller (ERC).
type: str
erc_server_port:
description:
- Ekahau RTLS Controller (ERC) UDP listening port.
type: int
fortipresence:
description:
- Enable/disable FortiPresence to monitor the location and activity of WiFi clients even if they don"t connect to this WiFi
network .
type: str
choices:
- foreign
- both
- disable
fortipresence_ble:
description:
- Enable/disable FortiPresence finding and reporting BLE devices.
type: str
choices:
- enable
- disable
fortipresence_frequency:
description:
- FortiPresence report transmit frequency (5 - 65535 sec).
type: int
fortipresence_port:
description:
- FortiPresence server UDP listening port .
type: int
fortipresence_project:
description:
- FortiPresence project name (max. 16 characters).
type: str
fortipresence_rogue:
description:
- Enable/disable FortiPresence finding and reporting rogue APs.
type: str
choices:
- enable
- disable
fortipresence_secret:
description:
- FortiPresence secret password (max. 16 characters).
type: str
fortipresence_server:
description:
- FortiPresence server IP address.
type: str
fortipresence_unassoc:
description:
- Enable/disable FortiPresence finding and reporting unassociated stations.
type: str
choices:
- enable
- disable
station_locate:
description:
- Enable/disable client station locating services for all clients, whether associated or not .
type: str
choices:
- enable
- disable
led_schedules:
description:
- Recurring firewall schedules for illuminating LEDs on the FortiAP. If led-state is enabled, LEDs will be visible when at least one of
the schedules is valid. Separate multiple schedule names with a space.
type: list
suboptions:
name:
description:
- LED schedule name. Source firewall.schedule.group.name firewall.schedule.recurring.name.
required: true
type: str
led_state:
description:
- Enable/disable use of LEDs on WTP .
type: str
choices:
- enable
- disable
lldp:
description:
- Enable/disable Link Layer Discovery Protocol (LLDP) for the WTP, FortiAP, or AP .
type: str
choices:
- enable
- disable
login_passwd:
description:
- Set the managed WTP, FortiAP, or AP's administrator password.
type: str
login_passwd_change:
description:
- Change or reset the administrator password of a managed WTP, FortiAP or AP (yes, default, or no).
type: str
choices:
- 'yes'
- default
- 'no'
max_clients:
description:
- Maximum number of stations (STAs) supported by the WTP .
type: int
name:
description:
- WTP (or FortiAP or AP) profile name.
required: true
type: str
platform:
description:
- WTP, FortiAP, or AP platform.
type: dict
suboptions:
ddscan:
description:
- Enable/disable use of one radio for dedicated dual-band scanning to detect RF characterization and wireless threat management.
type: str
choices:
- enable
- disable
mode:
description:
- Configure operation mode of 5G radios .
type: str
choices:
- single-5G
- dual-5G
type:
description:
- WTP, FortiAP or AP platform type. There are built-in WTP profiles for all supported FortiAP models. You can select a built-in
profile and customize it or create a new profile.
type: str
choices:
- AP-11N
- 220B
- 210B
- 222B
- 112B
- 320B
- 11C
- 14C
- 223B
- 28C
- 320C
- 221C
- 25D
- 222C
- 224D
- 214B
- 21D
- 24D
- 112D
- 223C
- 321C
- C220C
- C225C
- C23JD
- C24JE
- S321C
- S322C
- S323C
- S311C
- S313C
- S321CR
- S322CR
- S323CR
- S421E
- S422E
- S423E
- 421E
- 423E
- 221E
- 222E
- 223E
- 224E
- S221E
- S223E
- U421E
- U422EV
- U423E
- U221EV
- U223EV
- U24JEV
- U321EV
- U323EV
- 231E
- 321E
- 431F
- 432F
- 433F
- 231F
- 234F
- 23JF
- U431F
- U433F
- 831F
- U231F
- U234F
- U432F
poe_mode:
description:
- Set the WTP, FortiAP, or AP's PoE mode.
type: str
choices:
- auto
- 8023af
- 8023at
- power-adapter
- full
- high
- low
radio_1:
description:
- Configuration options for radio 1.
type: dict
suboptions:
airtime_fairness:
description:
- Enable/disable airtime fairness .
type: str
choices:
- enable
- disable
amsdu:
description:
- Enable/disable 802.11n AMSDU support. AMSDU can improve performance if supported by your WiFi clients .
type: str
choices:
- enable
- disable
ap_handoff:
description:
- Enable/disable AP handoff of clients to other APs .
type: str
choices:
- enable
- disable
ap_sniffer_addr:
description:
- MAC address to monitor.
type: str
ap_sniffer_bufsize:
description:
- Sniffer buffer size (1 - 32 MB).
type: int
ap_sniffer_chan:
description:
- Channel on which to operate the sniffer .
type: int
ap_sniffer_ctl:
description:
- Enable/disable sniffer on WiFi control frame .
type: str
choices:
- enable
- disable
ap_sniffer_data:
description:
- Enable/disable sniffer on WiFi data frame .
type: str
choices:
- enable
- disable
ap_sniffer_mgmt_beacon:
description:
- Enable/disable sniffer on WiFi management Beacon frames .
type: str
choices:
- enable
- disable
ap_sniffer_mgmt_other:
description:
- Enable/disable sniffer on WiFi management other frames .
type: str
choices:
- enable
- disable
ap_sniffer_mgmt_probe:
description:
- Enable/disable sniffer on WiFi management probe frames .
type: str
choices:
- enable
- disable
auto_power_high:
description:
- Automatic transmit power high limit in dBm (the actual range of transmit power depends on the AP platform type).
type: int
auto_power_level:
description:
- Enable/disable automatic power-level adjustment to prevent co-channel interference .
type: str
choices:
- enable
- disable
auto_power_low:
description:
- Automatic transmission power low limit in dBm (the actual range of transmit power depends on the AP platform type).
type: int
auto_power_target:
description:
- The target of automatic transmit power adjustment in dBm. (-95 to -20).
type: str
band:
description:
- WiFi band that Radio 1 operates on.
type: str
choices:
- 802.11a
- 802.11b
- 802.11g
- 802.11n
- 802.11n-5G
- 802.11ac
- 802.11n,g-only
- 802.11g-only
- 802.11n-only
- 802.11n-5G-only
- 802.11ac,n-only
- 802.11ac-only
- 802.11ax-5G
- 802.11ax
- 802.11ax,ac-only
- 802.11ax,ac,n-only
- 802.11ax-5G-only
- 802.11ax,n-only
- 802.11ax,n,g-only
- 802.11ax-only
- 802.11ac-2G
band_5g_type:
description:
- WiFi 5G band type.
type: str
choices:
- 5g-full
- 5g-high
- 5g-low
bandwidth_admission_control:
description:
- Enable/disable WiFi multimedia (WMM) bandwidth admission control to optimize WiFi bandwidth use. A request to join the wireless
network is only allowed if the access point has enough bandwidth to support it.
type: str
choices:
- enable
- disable
bandwidth_capacity:
description:
- Maximum bandwidth capacity allowed (1 - 600000 Kbps).
type: int
beacon_interval:
description:
- Beacon interval. The time between beacon frames in msec (the actual range of beacon interval depends on the AP platform type).
type: int
bss_color:
description:
- BSS color value for this 11ax radio (0 - 63, 0 means disable. ).
type: int
call_admission_control:
description:
- Enable/disable WiFi multimedia (WMM) call admission control to optimize WiFi bandwidth use for VoIP calls. New VoIP calls are
only accepted if there is enough bandwidth available to support them.
type: str
choices:
- enable
- disable
call_capacity:
description:
- Maximum number of Voice over WLAN (VoWLAN) phones supported by the radio (0 - 60).
type: int
channel:
description:
- Selected list of wireless radio channels.
type: list
suboptions:
chan:
description:
- Channel number.
required: true
type: str
channel_bonding:
description:
- 'Channel bandwidth: 80, 40, or 20MHz. Channels may use both 20 and 40 by enabling coexistence.'
type: str
choices:
- 80MHz
- 40MHz
- 20MHz
- 160MHz
channel_utilization:
description:
- Enable/disable measuring channel utilization.
type: str
choices:
- enable
- disable
coexistence:
description:
- Enable/disable allowing both HT20 and HT40 on the same radio .
type: str
choices:
- enable
- disable
darrp:
description:
- Enable/disable Distributed Automatic Radio Resource Provisioning (DARRP) to make sure the radio is always using the most optimal
channel .
type: str
choices:
- enable
- disable
drma:
description:
- Enable/disable dynamic radio mode assignment (DRMA) .
type: str
choices:
- disable
- enable
drma_sensitivity:
description:
- Network Coverage Factor (NCF) percentage required to consider a radio as redundant .
type: str
choices:
- low
- medium
- high
dtim:
description:
- DTIM interval. The frequency to transmit Delivery Traffic Indication Message (or Map) (DTIM) messages (1 - 255). Set higher to
save client battery life.
type: int
frag_threshold:
description:
- Maximum packet size that can be sent without fragmentation (800 - 2346 bytes).
type: int
frequency_handoff:
description:
- Enable/disable frequency handoff of clients to other channels .
type: str
choices:
- enable
- disable
iperf_protocol:
description:
- Iperf test protocol .
type: str
choices:
- udp
- tcp
iperf_server_port:
description:
- Iperf service port number.
type: int
max_clients:
description:
- Maximum number of stations (STAs) or WiFi clients supported by the radio. Range depends on the hardware.
type: int
max_distance:
description:
- Maximum expected distance between the AP and clients (0 - 54000 m).
type: int
mode:
description:
- Mode of radio 1. Radio 1 can be disabled, configured as an access point, a rogue AP monitor, or a sniffer.
type: str
choices:
- disabled
- ap
- monitor
- sniffer
- sam
power_level:
description:
- Radio power level as a percentage of the maximum transmit power (0 - 100).
type: int
power_mode:
description:
- Set radio effective isotropic radiated power (EIRP) in dBm or by a percentage of the maximum EIRP . This power takes into
account both radio transmit power and antenna gain. Higher power level settings may be constrained by local regulatory
requirements and AP capabilities.
type: str
choices:
- dBm
- percentage
power_value:
description:
- Radio EIRP power in dBm (1 - 33).
type: int
powersave_optimize:
description:
- Enable client power-saving features such as TIM, AC VO, and OBSS etc.
type: str
choices:
- tim
- ac-vo
- no-obss-scan
- no-11b-rate
- client-rate-follow
protection_mode:
description:
- Enable/disable 802.11g protection modes to support backwards compatibility with older clients (rtscts, ctsonly, disable).
type: str
choices:
- rtscts
- ctsonly
- disable
radio_id:
description:
- radio-id
type: int
rts_threshold:
description:
- Maximum packet size for RTS transmissions, specifying the maximum size of a data packet before RTS/CTS (256 - 2346 bytes).
type: int
sam_bssid:
description:
- BSSID for WiFi network.
type: str
sam_captive_portal:
description:
- Enable/disable Captive Portal Authentication .
type: str
choices:
- enable
- disable
sam_cwp_failure_string:
description:
- Failure identification on the page after an incorrect login.
type: str
sam_cwp_match_string:
description:
- Identification string from the captive portal login form.
type: str
sam_cwp_password:
description:
- Password for captive portal authentication.
type: str
sam_cwp_success_string:
description:
- Success identification on the page after a successful login.
type: str
sam_cwp_test_url:
description:
- Website the client is trying to access.
type: str
sam_cwp_username:
description:
- Username for captive portal authentication.
type: str
sam_password:
description:
- Passphrase for WiFi network connection.
type: str
sam_report_intv:
description:
- SAM report interval (sec), 0 for a one-time report.
type: int
sam_security_type:
description:
- Select WiFi network security type .
type: str
choices:
- open
- wpa-personal
- wpa-enterprise
sam_server:
description:
- SAM test server IP address or domain name.
type: str
sam_server_fqdn:
description:
- SAM test server domain name.
type: str
sam_server_ip:
description:
- SAM test server IP address.
type: str
sam_server_type:
description:
- Select SAM server type .
type: str
choices:
- ip
- fqdn
sam_ssid:
description:
- SSID for WiFi network.
type: str
sam_test:
description:
- Select SAM test type .
type: str
choices:
- ping
- iperf
sam_username:
description:
- Username for WiFi network connection.
type: str
short_guard_interval:
description:
- Use either the short guard interval (Short GI) of 400 ns or the long guard interval (Long GI) of 800 ns.
type: str
choices:
- enable
- disable
spectrum_analysis:
description:
- Enable/disable spectrum analysis to find interference that would negatively impact wireless performance.
type: str
choices:
- enable
- disable
- scan-only
transmit_optimize:
description:
- Packet transmission optimization options including power saving, aggregation limiting, retry limiting, etc. All are enabled by
default.
type: str
choices:
- disable
- power-save
- aggr-limit
- retry-limit
- send-bar
vap_all:
description:
- Enable/disable the automatic inheritance of all Virtual Access Points (VAPs) .
type: str
choices:
- enable
- disable
- tunnel
- bridge
- manual
vaps:
description:
- Manually selected list of Virtual Access Points (VAPs).
type: list
suboptions:
name:
description:
- Virtual Access Point (VAP) name. Source wireless-controller.vap-group.name wireless-controller.vap.name.
required: true
type: str
wids_profile:
description:
- Wireless Intrusion Detection System (WIDS) profile name to assign to the radio. Source wireless-controller.wids-profile.name.
type: str
zero_wait_dfs:
description:
- Enable/disable zero wait DFS on radio .
type: str
choices:
- enable
- disable
radio_2:
description:
- Configuration options for radio 2.
type: dict
suboptions:
airtime_fairness:
description:
- Enable/disable airtime fairness .
type: str
choices:
- enable
- disable
amsdu:
description:
- Enable/disable 802.11n AMSDU support. AMSDU can improve performance if supported by your WiFi clients .
type: str
choices:
- enable
- disable
ap_handoff:
description:
- Enable/disable AP handoff of clients to other APs .
type: str
choices:
- enable
- disable
ap_sniffer_addr:
description:
- MAC address to monitor.
type: str
ap_sniffer_bufsize:
description:
- Sniffer buffer size (1 - 32 MB).
type: int
ap_sniffer_chan:
description:
- Channel on which to operate the sniffer .
type: int
ap_sniffer_ctl:
description:
- Enable/disable sniffer on WiFi control frame .
type: str
choices:
- enable
- disable
ap_sniffer_data:
description:
- Enable/disable sniffer on WiFi data frame .
type: str
choices:
- enable
- disable
ap_sniffer_mgmt_beacon:
description:
- Enable/disable sniffer on WiFi management Beacon frames .
type: str
choices:
- enable
- disable
ap_sniffer_mgmt_other:
description:
- Enable/disable sniffer on WiFi management other frames .
type: str
choices:
- enable
- disable
ap_sniffer_mgmt_probe:
description:
- Enable/disable sniffer on WiFi management probe frames .
type: str
choices:
- enable
- disable
auto_power_high:
description:
- Automatic transmit power high limit in dBm (the actual range of transmit power depends on the AP platform type).
type: int
auto_power_level:
description:
- Enable/disable automatic power-level adjustment to prevent co-channel interference .
type: str
choices:
- enable
- disable
auto_power_low:
description:
- Automatic transmission power low limit in dBm (the actual range of transmit power depends on the AP platform type).
type: int
auto_power_target:
description:
- The target of automatic transmit power adjustment in dBm. (-95 to -20).
type: str
band:
description:
- WiFi band that Radio 2 operates on.
type: str
choices:
- 802.11a
- 802.11b
- 802.11g
- 802.11n
- 802.11n-5G
- 802.11ac
- 802.11n,g-only
- 802.11g-only
- 802.11n-only
- 802.11n-5G-only
- 802.11ac,n-only
- 802.11ac-only
- 802.11ax-5G
- 802.11ax
- 802.11ax,ac-only
- 802.11ax,ac,n-only
- 802.11ax-5G-only
- 802.11ax,n-only
- 802.11ax,n,g-only
- 802.11ax-only
- 802.11ac-2G
band_5g_type:
description:
- WiFi 5G band type.
type: str
choices:
- 5g-full
- 5g-high
- 5g-low
bandwidth_admission_control:
description:
- Enable/disable WiFi multimedia (WMM) bandwidth admission control to optimize WiFi bandwidth use. A request to join the wireless
network is only allowed if the access point has enough bandwidth to support it.
type: str
choices:
- enable
- disable
bandwidth_capacity:
description:
- Maximum bandwidth capacity allowed (1 - 600000 Kbps).
type: int
beacon_interval:
description:
- Beacon interval. The time between beacon frames in msec (the actual range of beacon interval depends on the AP platform type).
type: int
bss_color:
description:
- BSS color value for this 11ax radio (0 - 63, 0 means disable. ).
type: int
call_admission_control:
description:
- Enable/disable WiFi multimedia (WMM) call admission control to optimize WiFi bandwidth use for VoIP calls. New VoIP calls are
only accepted if there is enough bandwidth available to support them.
type: str
choices:
- enable
- disable
call_capacity:
description:
- Maximum number of Voice over WLAN (VoWLAN) phones supported by the radio (0 - 60).
type: int
channel:
description:
- Selected list of wireless radio channels.
type: list
suboptions:
chan:
description:
- Channel number.
required: true
type: str
channel_bonding:
description:
- 'Channel bandwidth: 80, 40, or 20MHz. Channels may use both 20 and 40 by enabling coexistence.'
type: str
choices:
- 80MHz
- 40MHz
- 20MHz
- 160MHz
channel_utilization:
description:
- Enable/disable measuring channel utilization.
type: str
choices:
- enable
- disable
coexistence:
description:
- Enable/disable allowing both HT20 and HT40 on the same radio .
type: str
choices:
- enable
- disable
darrp:
description:
- Enable/disable Distributed Automatic Radio Resource Provisioning (DARRP) to make sure the radio is always using the most optimal
channel .
type: str
choices:
- enable
- disable
drma:
description:
- Enable/disable dynamic radio mode assignment (DRMA) .
type: str
choices:
- disable
- enable
drma_sensitivity:
description:
- Network Coverage Factor (NCF) percentage required to consider a radio as redundant .
type: str
choices:
- low
- medium
- high
dtim:
description:
- DTIM interval. The frequency to transmit Delivery Traffic Indication Message (or Map) (DTIM) messages (1 - 255). Set higher to
save client battery life.
type: int
frag_threshold:
description:
- Maximum packet size that can be sent without fragmentation (800 - 2346 bytes).
type: int
frequency_handoff:
description:
- Enable/disable frequency handoff of clients to other channels .
type: str
choices:
- enable
- disable
iperf_protocol:
description:
- Iperf test protocol .
type: str
choices:
- udp
- tcp
iperf_server_port:
description:
- Iperf service port number.
type: int
max_clients:
description:
- Maximum number of stations (STAs) or WiFi clients supported by the radio. Range depends on the hardware.
type: int
max_distance:
description:
- Maximum expected distance between the AP and clients (0 - 54000 m).
type: int
mode:
description:
- Mode of radio 2. Radio 2 can be disabled, configured as an access point, a rogue AP monitor, or a sniffer.
type: str
choices:
- disabled
- ap
- monitor
- sniffer
- sam
power_level:
description:
- Radio power level as a percentage of the maximum transmit power (0 - 100).
type: int
power_mode:
description:
- Set radio effective isotropic radiated power (EIRP) in dBm or by a percentage of the maximum EIRP . This power takes into
account both radio transmit power and antenna gain. Higher power level settings may be constrained by local regulatory
requirements and AP capabilities.
type: str
choices:
- dBm
- percentage
power_value:
description:
- Radio EIRP power in dBm (1 - 33).
type: int
powersave_optimize:
description:
- Enable client power-saving features such as TIM, AC VO, and OBSS etc.
type: str
choices:
- tim
- ac-vo
- no-obss-scan
- no-11b-rate
- client-rate-follow
protection_mode:
description:
- Enable/disable 802.11g protection modes to support backwards compatibility with older clients (rtscts, ctsonly, disable).
type: str
choices:
- rtscts
- ctsonly
- disable
radio_id:
description:
- radio-id
type: int
rts_threshold:
description:
- Maximum packet size for RTS transmissions, specifying the maximum size of a data packet before RTS/CTS (256 - 2346 bytes).
type: int
sam_bssid:
description:
- BSSID for WiFi network.
type: str
sam_captive_portal:
description:
- Enable/disable Captive Portal Authentication .
type: str
choices:
- enable
- disable
sam_cwp_failure_string:
description:
- Failure identification on the page after an incorrect login.
type: str
sam_cwp_match_string:
description:
- Identification string from the captive portal login form.
type: str
sam_cwp_password:
description:
- Password for captive portal authentication.
type: str
sam_cwp_success_string:
description:
- Success identification on the page after a successful login.
type: str
sam_cwp_test_url:
description:
- Website the client is trying to access.
type: str
sam_cwp_username:
description:
- Username for captive portal authentication.
type: str
sam_password:
description:
- Passphrase for WiFi network connection.
type: str
sam_report_intv:
description:
- SAM report interval (sec), 0 for a one-time report.
type: int
sam_security_type:
description:
- Select WiFi network security type .
type: str
choices:
- open
- wpa-personal
- wpa-enterprise
sam_server:
description:
- SAM test server IP address or domain name.
type: str
sam_server_fqdn:
description:
- SAM test server domain name.
type: str
sam_server_ip:
description:
- SAM test server IP address.
type: str
sam_server_type:
description:
- Select SAM server type .
type: str
choices:
- ip
- fqdn
sam_ssid:
description:
- SSID for WiFi network.
type: str
sam_test:
description:
- Select SAM test type .
type: str
choices:
- ping
- iperf
sam_username:
description:
- Username for WiFi network connection.
type: str
short_guard_interval:
description:
- Use either the short guard interval (Short GI) of 400 ns or the long guard interval (Long GI) of 800 ns.
type: str
choices:
- enable
- disable
spectrum_analysis:
description:
- Enable/disable spectrum analysis to find interference that would negatively impact wireless performance.
type: str
choices:
- enable
- disable
- scan-only
transmit_optimize:
description:
- Packet transmission optimization options including power saving, aggregation limiting, retry limiting, etc. All are enabled by
default.
type: str
choices:
- disable
- power-save
- aggr-limit
- retry-limit
- send-bar
vap_all:
description:
- Enable/disable the automatic inheritance of all Virtual Access Points (VAPs) .
type: str
choices:
- enable
- disable
- tunnel
- bridge
- manual
vaps:
description:
- Manually selected list of Virtual Access Points (VAPs).
type: list
suboptions:
name:
description:
- Virtual Access Point (VAP) name. Source wireless-controller.vap-group.name wireless-controller.vap.name.
required: true
type: str
wids_profile:
description:
- Wireless Intrusion Detection System (WIDS) profile name to assign to the radio. Source wireless-controller.wids-profile.name.
type: str
zero_wait_dfs:
description:
- Enable/disable zero wait DFS on radio .
type: str
choices:
- enable
- disable
radio_3:
description:
- Configuration options for radio 3.
type: dict
suboptions:
airtime_fairness:
description:
- Enable/disable airtime fairness .
type: str
choices:
- enable
- disable
amsdu:
description:
- Enable/disable 802.11n AMSDU support. AMSDU can improve performance if supported by your WiFi clients .
type: str
choices:
- enable
- disable
ap_handoff:
description:
- Enable/disable AP handoff of clients to other APs .
type: str
choices:
- enable
- disable
ap_sniffer_addr:
description:
- MAC address to monitor.
type: str
ap_sniffer_bufsize:
description:
- Sniffer buffer size (1 - 32 MB).
type: int
ap_sniffer_chan:
description:
- Channel on which to operate the sniffer .
type: int
ap_sniffer_ctl:
description:
- Enable/disable sniffer on WiFi control frame .
type: str
choices:
- enable
- disable
ap_sniffer_data:
description:
- Enable/disable sniffer on WiFi data frame .
type: str
choices:
- enable
- disable
ap_sniffer_mgmt_beacon:
description:
- Enable/disable sniffer on WiFi management Beacon frames .
type: str
choices:
- enable
- disable
ap_sniffer_mgmt_other:
description:
- Enable/disable sniffer on WiFi management other frames .
type: str
choices:
- enable
- disable
ap_sniffer_mgmt_probe:
description:
- Enable/disable sniffer on WiFi management probe frames .
type: str
choices:
- enable
- disable
auto_power_high:
description:
- The upper bound of automatic transmit power adjustment in dBm (the actual range of transmit power depends on the AP platform
type).
type: int
auto_power_level:
description:
- Enable/disable automatic power-level adjustment to prevent co-channel interference .
type: str
choices:
- enable
- disable
auto_power_low:
description:
- The lower bound of automatic transmit power adjustment in dBm (the actual range of transmit power depends on the AP platform
type).
type: int
auto_power_target:
description:
- The target of automatic transmit power adjustment in dBm. (-95 to -20).
type: str
band:
description:
- WiFi band that Radio 3 operates on.
type: str
choices:
- 802.11a
- 802.11b
- 802.11g
- 802.11n
- 802.11n-5G
- 802.11ac
- 802.11ax-5G
- 802.11ax
- 802.11n,g-only
- 802.11g-only
- 802.11n-only
- 802.11n-5G-only
- 802.11ac,n-only
- 802.11ac-only
- 802.11ax,ac-only
- 802.11ax,ac,n-only
- 802.11ax-5G-only
- 802.11ax,n-only
- 802.11ax,n,g-only
- 802.11ax-only
- 802.11ac-2G
band_5g_type:
description:
- WiFi 5G band type.
type: str
choices:
- 5g-full
- 5g-high
- 5g-low
bandwidth_admission_control:
description:
- Enable/disable WiFi multimedia (WMM) bandwidth admission control to optimize WiFi bandwidth use. A request to join the wireless
network is only allowed if the access point has enough bandwidth to support it.
type: str
choices:
- enable
- disable
bandwidth_capacity:
description:
- Maximum bandwidth capacity allowed (1 - 600000 Kbps).
type: int
beacon_interval:
description:
- Beacon interval. The time between beacon frames in msec (the actual range of beacon interval depends on the AP platform type).
type: int
bss_color:
description:
- BSS color value for this 11ax radio (0 - 63, 0 means disable. ).
type: int
call_admission_control:
description:
- Enable/disable WiFi multimedia (WMM) call admission control to optimize WiFi bandwidth use for VoIP calls. New VoIP calls are
only accepted if there is enough bandwidth available to support them.
type: str
choices:
- enable
- disable
call_capacity:
description:
- Maximum number of Voice over WLAN (VoWLAN) phones supported by the radio (0 - 60).
type: int
channel:
description:
- Selected list of wireless radio channels.
type: list
suboptions:
chan:
description:
- Channel number.
required: true
type: str
channel_bonding:
description:
- 'Channel bandwidth: 160,80, 40, or 20MHz. Channels may use both 20 and 40 by enabling coexistence.'
type: str
choices:
- 160MHz
- 80MHz
- 40MHz
- 20MHz
channel_utilization:
description:
- Enable/disable measuring channel utilization.
type: str
choices:
- enable
- disable
coexistence:
description:
- Enable/disable allowing both HT20 and HT40 on the same radio .
type: str
choices:
- enable
- disable
darrp:
description:
- Enable/disable Distributed Automatic Radio Resource Provisioning (DARRP) to make sure the radio is always using the most optimal
channel .
type: str
choices:
- enable
- disable
drma:
description:
- Enable/disable dynamic radio mode assignment (DRMA) .
type: str
choices:
- disable
- enable
drma_sensitivity:
description:
- Network Coverage Factor (NCF) percentage required to consider a radio as redundant .
type: str
choices:
- low
- medium
- high
dtim:
description:
- Delivery Traffic Indication Map (DTIM) period (1 - 255). Set higher to save battery life of WiFi client in power-save mode.
type: int
frag_threshold:
description:
- Maximum packet size that can be sent without fragmentation (800 - 2346 bytes).
type: int
frequency_handoff:
description:
- Enable/disable frequency handoff of clients to other channels .
type: str
choices:
- enable
- disable
iperf_protocol:
description:
- Iperf test protocol .
type: str
choices:
- udp
- tcp
iperf_server_port:
description:
- Iperf service port number.
type: int
max_clients:
description:
- Maximum number of stations (STAs) or WiFi clients supported by the radio. Range depends on the hardware.
type: int
max_distance:
description:
- Maximum expected distance between the AP and clients (0 - 54000 m).
type: int
mode:
description:
- Mode of radio 3. Radio 3 can be disabled, configured as an access point, a rogue AP monitor, or a sniffer.
type: str
choices:
- disabled
- ap
- monitor
- sniffer
- sam
power_level:
description:
- Radio power level as a percentage of the maximum transmit power (0 - 100).
type: int
power_mode:
description:
- Set radio effective isotropic radiated power (EIRP) in dBm or by a percentage of the maximum EIRP . This power takes into
account both radio transmit power and antenna gain. Higher power level settings may be constrained by local regulatory
requirements and AP capabilities.
type: str
choices:
- dBm
- percentage
power_value:
description:
- Radio EIRP power in dBm (1 - 33).
type: int
powersave_optimize:
description:
- Enable client power-saving features such as TIM, AC VO, and OBSS etc.
type: str
choices:
- tim
- ac-vo
- no-obss-scan
- no-11b-rate
- client-rate-follow
protection_mode:
description:
- Enable/disable 802.11g protection modes to support backwards compatibility with older clients (rtscts, ctsonly, disable).
type: str
choices:
- rtscts
- ctsonly
- disable
radio_id:
description:
- radio-id
type: int
rts_threshold:
description:
- Maximum packet size for RTS transmissions, specifying the maximum size of a data packet before RTS/CTS (256 - 2346 bytes).
type: int
sam_bssid:
description:
- BSSID for WiFi network.
type: str
sam_captive_portal:
description:
- Enable/disable Captive Portal Authentication .
type: str
choices:
- enable
- disable
sam_cwp_failure_string:
description:
- Failure identification on the page after an incorrect login.
type: str
sam_cwp_match_string:
description:
- Identification string from the captive portal login form.
type: str
sam_cwp_password:
description:
- Password for captive portal authentication.
type: str
sam_cwp_success_string:
description:
- Success identification on the page after a successful login.
type: str
sam_cwp_test_url:
description:
- Website the client is trying to access.
type: str
sam_cwp_username:
description:
- Username for captive portal authentication.
type: str
sam_password:
description:
- Passphrase for WiFi network connection.
type: str
sam_report_intv:
description:
- SAM report interval (sec), 0 for a one-time report.
type: int
sam_security_type:
description:
- Select WiFi network security type .
type: str
choices:
- open
- wpa-personal
- wpa-enterprise
sam_server:
description:
- SAM test server IP address or domain name.
type: str
sam_server_fqdn:
description:
- SAM test server domain name.
type: str
sam_server_ip:
description:
- SAM test server IP address.
type: str
sam_server_type:
description:
- Select SAM server type .
type: str
choices:
- ip
- fqdn
sam_ssid:
description:
- SSID for WiFi network.
type: str
sam_test:
description:
- Select SAM test type .
type: str
choices:
- ping
- iperf
sam_username:
description:
- Username for WiFi network connection.
type: str
short_guard_interval:
description:
- Use either the short guard interval (Short GI) of 400 ns or the long guard interval (Long GI) of 800 ns.
type: str
choices:
- enable
- disable
spectrum_analysis:
description:
- Enable/disable spectrum analysis to find interference that would negatively impact wireless performance.
type: str
choices:
- enable
- disable
- scan-only
transmit_optimize:
description:
- Packet transmission optimization options including power saving, aggregation limiting, retry limiting, etc. All are enabled by
default.
type: str
choices:
- disable
- power-save
- aggr-limit
- retry-limit
- send-bar
vap_all:
description:
- Enable/disable the automatic inheritance of all Virtual Access Points (VAPs) .
type: str
choices:
- enable
- disable
- tunnel
- bridge
- manual
vaps:
description:
- Manually selected list of Virtual Access Points (VAPs).
type: list
suboptions:
name:
description:
- Virtual Access Point (VAP) name. Source wireless-controller.vap-group.name system.interface.name.
required: true
type: str
wids_profile:
description:
- Wireless Intrusion Detection System (WIDS) profile name to assign to the radio. Source wireless-controller.wids-profile.name.
type: str
zero_wait_dfs:
description:
- Enable/disable zero wait DFS on radio .
type: str
choices:
- enable
- disable
radio_4:
description:
- Configuration options for radio 4.
type: dict
suboptions:
airtime_fairness:
description:
- Enable/disable airtime fairness .
type: str
choices:
- enable
- disable
amsdu:
description:
- Enable/disable 802.11n AMSDU support. AMSDU can improve performance if supported by your WiFi clients .
type: str
choices:
- enable
- disable
ap_handoff:
description:
- Enable/disable AP handoff of clients to other APs .
type: str
choices:
- enable
- disable
ap_sniffer_addr:
description:
- MAC address to monitor.
type: str
ap_sniffer_bufsize:
description:
- Sniffer buffer size (1 - 32 MB).
type: int
ap_sniffer_chan:
description:
- Channel on which to operate the sniffer .
type: int
ap_sniffer_ctl:
description:
- Enable/disable sniffer on WiFi control frame .
type: str
choices:
- enable
- disable
ap_sniffer_data:
description:
- Enable/disable sniffer on WiFi data frame .
type: str
choices:
- enable
- disable
ap_sniffer_mgmt_beacon:
description:
- Enable/disable sniffer on WiFi management Beacon frames .
type: str
choices:
- enable
- disable
ap_sniffer_mgmt_other:
description:
- Enable/disable sniffer on WiFi management other frames .
type: str
choices:
- enable
- disable
ap_sniffer_mgmt_probe:
description:
- Enable/disable sniffer on WiFi management probe frames .
type: str
choices:
- enable
- disable
auto_power_high:
description:
- The upper bound of automatic transmit power adjustment in dBm (the actual range of transmit power depends on the AP platform
type).
type: int
auto_power_level:
description:
- Enable/disable automatic power-level adjustment to prevent co-channel interference .
type: str
choices:
- enable
- disable
auto_power_low:
description:
- The lower bound of automatic transmit power adjustment in dBm (the actual range of transmit power depends on the AP platform
type).
type: int
auto_power_target:
description:
- The target of automatic transmit power adjustment in dBm. (-95 to -20).
type: str
band:
description:
- WiFi band that Radio 4 operates on.
type: str
choices:
- 802.11a
- 802.11b
- 802.11g
- 802.11n
- 802.11n-5G
- 802.11ac
- 802.11ax-5G
- 802.11ax
- 802.11n,g-only
- 802.11g-only
- 802.11n-only
- 802.11n-5G-only
- 802.11ac,n-only
- 802.11ac-only
- 802.11ax,ac-only
- 802.11ax,ac,n-only
- 802.11ax-5G-only
- 802.11ax,n-only
- 802.11ax,n,g-only
- 802.11ax-only
- 802.11ac-2G
band_5g_type:
description:
- WiFi 5G band type.
type: str
choices:
- 5g-full
- 5g-high
- 5g-low
bandwidth_admission_control:
description:
- Enable/disable WiFi multimedia (WMM) bandwidth admission control to optimize WiFi bandwidth use. A request to join the wireless
network is only allowed if the access point has enough bandwidth to support it.
type: str
choices:
- enable
- disable
bandwidth_capacity:
description:
- Maximum bandwidth capacity allowed (1 - 600000 Kbps).
type: int
beacon_interval:
description:
- Beacon interval. The time between beacon frames in msec (the actual range of beacon interval depends on the AP platform type).
type: int
bss_color:
description:
- BSS color value for this 11ax radio (0 - 63, 0 means disable. ).
type: int
call_admission_control:
description:
- Enable/disable WiFi multimedia (WMM) call admission control to optimize WiFi bandwidth use for VoIP calls. New VoIP calls are
only accepted if there is enough bandwidth available to support them.
type: str
choices:
- enable
- disable
call_capacity:
description:
- Maximum number of Voice over WLAN (VoWLAN) phones supported by the radio (0 - 60).
type: int
channel:
description:
- Selected list of wireless radio channels.
type: list
suboptions:
chan:
description:
- Channel number.
required: true
type: str
channel_bonding:
description:
- 'Channel bandwidth: 160,80, 40, or 20MHz. Channels may use both 20 and 40 by enabling coexistence.'
type: str
choices:
- 160MHz
- 80MHz
- 40MHz
- 20MHz
channel_utilization:
description:
- Enable/disable measuring channel utilization.
type: str
choices:
- enable
- disable
coexistence:
description:
- Enable/disable allowing both HT20 and HT40 on the same radio .
type: str
choices:
- enable
- disable
darrp:
description:
- Enable/disable Distributed Automatic Radio Resource Provisioning (DARRP) to make sure the radio is always using the most optimal
channel .
type: str
choices:
- enable
- disable
drma:
description:
- Enable/disable dynamic radio mode assignment (DRMA) .
type: str
choices:
- disable
- enable
drma_sensitivity:
description:
- Network Coverage Factor (NCF) percentage required to consider a radio as redundant .
type: str
choices:
- low
- medium
- high
dtim:
description:
- Delivery Traffic Indication Map (DTIM) period (1 - 255). Set higher to save battery life of WiFi client in power-save mode.
type: int
frag_threshold:
description:
- Maximum packet size that can be sent without fragmentation (800 - 2346 bytes).
type: int
frequency_handoff:
description:
- Enable/disable frequency handoff of clients to other channels .
type: str
choices:
- enable
- disable
iperf_protocol:
description:
- Iperf test protocol .
type: str
choices:
- udp
- tcp
iperf_server_port:
description:
- Iperf service port number.
type: int
max_clients:
description:
- Maximum number of stations (STAs) or WiFi clients supported by the radio. Range depends on the hardware.
type: int
max_distance:
description:
- Maximum expected distance between the AP and clients (0 - 54000 m).
type: int
mode:
description:
- Mode of radio 4. Radio 4 can be disabled, configured as an access point, a rogue AP monitor, or a sniffer.
type: str
choices:
- disabled
- ap
- monitor
- sniffer
- sam
power_level:
description:
- Radio power level as a percentage of the maximum transmit power (0 - 100).
type: int
power_mode:
description:
- Set radio effective isotropic radiated power (EIRP) in dBm or by a percentage of the maximum EIRP . This power takes into
account both radio transmit power and antenna gain. Higher power level settings may be constrained by local regulatory
requirements and AP capabilities.
type: str
choices:
- dBm
- percentage
power_value:
description:
- Radio EIRP power in dBm (1 - 33).
type: int
powersave_optimize:
description:
- Enable client power-saving features such as TIM, AC VO, and OBSS etc.
type: str
choices:
- tim
- ac-vo
- no-obss-scan
- no-11b-rate
- client-rate-follow
protection_mode:
description:
- Enable/disable 802.11g protection modes to support backwards compatibility with older clients (rtscts, ctsonly, disable).
type: str
choices:
- rtscts
- ctsonly
- disable
rts_threshold:
description:
- Maximum packet size for RTS transmissions, specifying the maximum size of a data packet before RTS/CTS (256 - 2346 bytes).
type: int
sam_bssid:
description:
- BSSID for WiFi network.
type: str
sam_captive_portal:
description:
- Enable/disable Captive Portal Authentication .
type: str
choices:
- enable
- disable
sam_cwp_failure_string:
description:
- Failure identification on the page after an incorrect login.
type: str
sam_cwp_match_string:
description:
- Identification string from the captive portal login form.
type: str
sam_cwp_password:
description:
- Password for captive portal authentication.
type: str
sam_cwp_success_string:
description:
- Success identification on the page after a successful login.
type: str
sam_cwp_test_url:
description:
- Website the client is trying to access.
type: str
sam_cwp_username:
description:
- Username for captive portal authentication.
type: str
sam_password:
description:
- Passphrase for WiFi network connection.
type: str
sam_report_intv:
description:
- SAM report interval (sec), 0 for a one-time report.
type: int
sam_security_type:
description:
- Select WiFi network security type .
type: str
choices:
- open
- wpa-personal
- wpa-enterprise
sam_server:
description:
- SAM test server IP address or domain name.
type: str
sam_server_fqdn:
description:
- SAM test server domain name.
type: str
sam_server_ip:
description:
- SAM test server IP address.
type: str
sam_server_type:
description:
- Select SAM server type .
type: str
choices:
- ip
- fqdn
sam_ssid:
description:
- SSID for WiFi network.
type: str
sam_test:
description:
- Select SAM test type .
type: str
choices:
- ping
- iperf
sam_username:
description:
- Username for WiFi network connection.
type: str
short_guard_interval:
description:
- Use either the short guard interval (Short GI) of 400 ns or the long guard interval (Long GI) of 800 ns.
type: str
choices:
- enable
- disable
spectrum_analysis:
description:
- Enable/disable spectrum analysis to find interference that would negatively impact wireless performance.
type: str
choices:
- enable
- disable
- scan-only
transmit_optimize:
description:
- Packet transmission optimization options including power saving, aggregation limiting, retry limiting, etc. All are enabled by
default.
type: str
choices:
- disable
- power-save
- aggr-limit
- retry-limit
- send-bar
vap_all:
description:
- Enable/disable the automatic inheritance of all Virtual Access Points (VAPs) .
type: str
choices:
- enable
- disable
- tunnel
- bridge
- manual
vaps:
description:
- Manually selected list of Virtual Access Points (VAPs).
type: list
suboptions:
name:
description:
- Virtual Access Point (VAP) name. Source wireless-controller.vap-group.name system.interface.name.
required: true
type: str
wids_profile:
description:
- Wireless Intrusion Detection System (WIDS) profile name to assign to the radio. Source wireless-controller.wids-profile.name.
type: str
zero_wait_dfs:
description:
- Enable/disable zero wait DFS on radio .
type: str
choices:
- enable
- disable
split_tunneling_acl:
description:
- Split tunneling ACL filter list.
type: list
suboptions:
dest_ip:
description:
- Destination IP and mask for the split-tunneling subnet.
type: str
id:
description:
- ID.
required: true
type: int
split_tunneling_acl_local_ap_subnet:
description:
- Enable/disable automatically adding local subnetwork of FortiAP to split-tunneling ACL .
type: str
choices:
- enable
- disable
split_tunneling_acl_path:
description:
- Split tunneling ACL path is local/tunnel.
type: str
choices:
- tunnel
- local
tun_mtu_downlink:
description:
- Downlink CAPWAP tunnel MTU (0, 576, or 1500 bytes).
type: int
tun_mtu_uplink:
description:
- Uplink CAPWAP tunnel MTU (0, 576, or 1500 bytes).
type: int
wan_port_mode:
description:
- Enable/disable using a WAN port as a LAN port.
type: str
choices:
- wan-lan
- wan-only
'''
EXAMPLES = '''
- collections:
- fortinet.fortios
connection: httpapi
hosts: fortigate01
vars:
ansible_httpapi_port: 443
ansible_httpapi_use_ssl: true
ansible_httpapi_validate_certs: false
vdom: root
tasks:
- name: fortios_wireless_controller_wtp_profile
fortios_wireless_controller_wtp_profile:
vdom: root
state: present
wireless_controller_wtp_profile:
ap_country: NA
ap_handoff: disable
control_message_offload: ebp-frame
dtls_in_kernel: disable
dtls_policy: clear-text
energy_efficient_ethernet: disable
ext_info_enable: enable
frequency_handoff: disable
handoff_roaming: enable
handoff_rssi: 25
handoff_sta_thresh: 55
ip_fragment_preventing: tcp-mss-adjust
led_state: enable
lldp: enable
login_passwd_change: 'no'
max_clients: 0
name: terr-test-rdmstr
poe_mode: auto
split_tunneling_acl_local_ap_subnet: disable
split_tunneling_acl_path: local
tun_mtu_downlink: 576
tun_mtu_uplink: 576
wan_port_mode: wan-only
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import FortiOSHandler
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import check_legacy_fortiosapi
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import schema_to_module_spec
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import check_schema_versioning
from ansible_collections.fortinet.fortios.plugins.module_utils.fortimanager.common import FAIL_SOCKET_MSG
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.comparison import is_same_comparison
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.comparison import serialize
def filter_wireless_controller_wtp_profile_data(json):
    """Project *json* onto the set of options this module knows about.

    Returns a new dict containing only the keys from the fixed option list
    that are present in *json* with a non-None value; all other keys (and
    explicit None values) are dropped.
    """
    option_list = ['allowaccess', 'ap_country', 'ap_handoff',
                   'apcfg_profile', 'ble_profile', 'comment',
                   'console_login', 'control_message_offload', 'deny_mac_list',
                   'dtls_in_kernel', 'dtls_policy', 'energy_efficient_ethernet',
                   'esl_ses_dongle', 'ext_info_enable', 'frequency_handoff',
                   'handoff_roaming', 'handoff_rssi', 'handoff_sta_thresh',
                   'indoor_outdoor_deployment', 'ip_fragment_preventing', 'lan',
                   'lbs', 'led_schedules', 'led_state',
                   'lldp', 'login_passwd', 'login_passwd_change',
                   'max_clients', 'name', 'platform',
                   'poe_mode', 'radio_1', 'radio_2',
                   'radio_3', 'radio_4', 'split_tunneling_acl',
                   'split_tunneling_acl_local_ap_subnet', 'split_tunneling_acl_path', 'tun_mtu_downlink',
                   'tun_mtu_uplink', 'wan_port_mode']
    return {option: json[option]
            for option in option_list
            if option in json and json[option] is not None}
def flatten_single_path(data, path, index):
    """Walk *path* through *data* and, at the final key, replace the list
    value with a single space-joined string (mutates *data* in place).

    Intermediate lists are traversed element-wise so the terminal key is
    flattened inside every list entry.
    """
    # Guard clauses: nothing to do past the end of the path, or when the
    # current container is empty / lacks the next key / has a falsy value.
    if not data or index == len(path):
        return
    key = path[index]
    if key not in data or not data[key]:
        return

    child = data[key]
    if index == len(path) - 1:
        # Terminal key: collapse the list into a space-separated string.
        data[key] = ' '.join(str(item) for item in child)
        return

    if isinstance(child, list):
        for entry in child:
            flatten_single_path(entry, path, index + 1)
    else:
        flatten_single_path(child, path, index + 1)
def flatten_multilists_attributes(data):
    """Collapse every known multi-value option of *data* from a list into a
    space-separated string, as the FortiOS API expects (mutates in place and
    returns *data* for chaining).
    """
    paths_to_flatten = [
        [u'control_message_offload'],
        [u'ip_fragment_preventing'],
        [u'radio_3', u'transmit_optimize'],
        [u'radio_3', u'powersave_optimize'],
        [u'radio_2', u'transmit_optimize'],
        [u'radio_2', u'powersave_optimize'],
        [u'radio_1', u'transmit_optimize'],
        [u'radio_1', u'powersave_optimize'],
        [u'allowaccess'],
        [u'dtls_policy'],
        [u'radio_4', u'transmit_optimize'],
        [u'radio_4', u'powersave_optimize'],
    ]
    for path in paths_to_flatten:
        flatten_single_path(data, path, 0)
    return data
def underscore_to_hyphen(data):
    """Recursively rewrite dict keys from snake_case to the hyphenated form
    used by the FortiOS API.

    Lists are rewritten in place (same list object is returned); dicts are
    rebuilt as new objects; any other value is returned unchanged.
    """
    if isinstance(data, list):
        for idx, item in enumerate(data):
            data[idx] = underscore_to_hyphen(item)
        return data
    if isinstance(data, dict):
        return {key.replace('_', '-'): underscore_to_hyphen(value)
                for key, value in data.items()}
    return data
def wireless_controller_wtp_profile(data, fos, check_mode=False):
    """Create, update, or delete a wtp-profile on the FortiOS device.

    In check mode, returns a (failed, changed, data) tuple computed by
    comparing the desired configuration with what the device reports,
    without modifying anything. Otherwise issues the set/delete call via
    *fos* and returns its raw response.
    """
    vdom = data['vdom']
    state = data['state']
    wireless_controller_wtp_profile_data = data['wireless_controller_wtp_profile']
    # Normalize the task body: join multi-value lists into strings, keep only
    # known options, and convert key names to the API's hyphenated form.
    wireless_controller_wtp_profile_data = flatten_multilists_attributes(wireless_controller_wtp_profile_data)
    filtered_data = underscore_to_hyphen(filter_wireless_controller_wtp_profile_data(wireless_controller_wtp_profile_data))
    # check_mode starts from here
    if check_mode:
        mkey = fos.get_mkey('wireless_controller', 'wtp_profile', filtered_data, vdom=vdom)
        current_data = fos.get('wireless_controller', 'wtp_profile', vdom=vdom, mkey=mkey)
        # Object exists only if the GET succeeded and returned at least one result.
        is_existed = current_data and current_data.get('http_status') == 200 \
            and isinstance(current_data.get('results'), list) \
            and len(current_data['results']) > 0
        # 2. if it exists and the state is 'present' then compare current settings with desired
        if state == 'present' or state is True:
            if mkey is None:
                # No primary key derivable from the task body: assume a change.
                return False, True, filtered_data
            # if mkey exists then compare each other
            # record exits and they're matched or not
            if is_existed:
                is_same = is_same_comparison(
                    serialize(current_data['results'][0]), serialize(filtered_data))
                # changed only when the serialized configs differ
                return False, not is_same, filtered_data
            # record does not exist
            return False, True, filtered_data
        if state == 'absent':
            if mkey is None:
                # Cannot identify the object; nothing would be deleted.
                return False, False, filtered_data
            if is_existed:
                return False, True, filtered_data
            return False, False, filtered_data
        # NOTE(review): the key 'reason: ' contains a stray colon and trailing
        # space — kept as-is for backward compatibility.
        return True, False, {'reason: ': 'Must provide state parameter'}
    if state == "present" or state is True:
        return fos.set('wireless-controller',
                       'wtp-profile',
                       data=filtered_data,
                       vdom=vdom)
    elif state == "absent":
        return fos.delete('wireless-controller',
                          'wtp-profile',
                          mkey=filtered_data['name'],
                          vdom=vdom)
    else:
        fos._module.fail_json(msg='state must be present or absent!')
def is_successful_status(resp):
    """Return True if a FortiOS API response dict indicates success.

    A response is considered successful when any of the following holds:
    - its 'status' field is 'success',
    - its 'http_status' field is 200,
    - it is a DELETE that returned 404 (the object was already absent).

    Uses dict.get so a response missing any of these keys is simply treated
    as unsuccessful (the original indexed resp['http_status'] in the DELETE
    clause, which raised KeyError when that key was absent).
    """
    return (
        resp.get('status') == 'success'
        or resp.get('http_status') == 200
        # Deleting an already-missing object counts as success (idempotence).
        or (resp.get('http_method') == "DELETE" and resp.get('http_status') == 404)
    )
def fortios_wireless_controller(data, fos, check_mode):
    """Dispatch the wtp-profile task and normalize the outcome.

    Returns the raw check-mode triple unchanged, otherwise a
    (failed, changed, response) tuple derived from the API response.
    """
    fos.do_member_operation('wireless_controller_wtp_profile')
    if data['wireless_controller_wtp_profile']:
        resp = wireless_controller_wtp_profile(data, fos, check_mode)
    else:
        fos._module.fail_json(msg='missing task body: %s' % ('wireless_controller_wtp_profile'))

    if check_mode:
        return resp

    succeeded = is_successful_status(resp)
    # Absent 'revision_changed' means the device did not report revision
    # tracking; assume a change occurred in that case.
    changed = succeeded and resp.get('revision_changed', True)
    return not succeeded, changed, resp
versioned_schema = {
"type": "list",
"children": {
"comment": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"split_tunneling_acl": {
"type": "list",
"children": {
"id": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"dest_ip": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"lbs": {
"type": "dict",
"children": {
"fortipresence_port": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"erc_server_port": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"aeroscout": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"fortipresence_frequency": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"fortipresence_ble": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"fortipresence_project": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"aeroscout_ap_mac": {
"type": "string",
"options": [
{
"value": "bssid",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "board-mac",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"aeroscout_mmu_report": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"fortipresence": {
"type": "string",
"options": [
{
"value": "foreign",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "both",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ekahau_tag": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"fortipresence_unassoc": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"erc_server_ip": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"aeroscout_mu_factor": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"fortipresence_server": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"station_locate": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"aeroscout_mu": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"aeroscout_mu_timeout": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"aeroscout_server_ip": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"fortipresence_secret": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"aeroscout_server_port": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"fortipresence_rogue": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ekahau_blink_mode": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"control_message_offload": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "ebp-frame",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "aeroscout-tag",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ap-list",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "sta-list",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "sta-cap-list",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "stats",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "aeroscout-mu",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "sta-health",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "spectral-analysis",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"tun_mtu_downlink": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"tun_mtu_uplink": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ble_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"lldp": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"login_passwd_change": {
"type": "string",
"options": [
{
"value": "yes",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "default",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "no",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"platform": {
"type": "dict",
"children": {
"type": {
"type": "string",
"options": [
{
"value": "AP-11N",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "220B",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "210B",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "222B",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "112B",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "320B",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "11C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "14C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "223B",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "28C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "320C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "221C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "25D",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "222C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "224D",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "214B",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "21D",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "24D",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "112D",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "223C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "321C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "C220C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "C225C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "C23JD",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "C24JE",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S321C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S322C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S323C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S311C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S313C",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S321CR",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S322CR",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S323CR",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S421E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S422E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S423E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "421E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "423E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "221E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "222E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "223E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "224E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S221E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "S223E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "U421E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "U422EV",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "U423E",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "U221EV",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "U223EV",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "U24JEV",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "U321EV",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "U323EV",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "231E",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "321E",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "431F",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "432F",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "433F",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "231F",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "234F",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "23JF",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "U431F",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "U433F",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "831F",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "U231F",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "U234F",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "U432F",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mode": {
"type": "string",
"options": [
{
"value": "single-5G",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "dual-5G",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ddscan": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"frequency_handoff": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "disable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
"deny_mac_list": {
"type": "list",
"children": {
"mac": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"id": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"split_tunneling_acl_path": {
"type": "string",
"options": [
{
"value": "tunnel",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "local",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ip_fragment_preventing": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "tcp-mss-adjust",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "icmp-unreachable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"radio_3": {
"type": "dict",
"children": {
"drma": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "enable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"transmit_optimize": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "power-save",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "aggr-limit",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "retry-limit",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "send-bar",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ap_sniffer_bufsize": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ap_handoff": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"dtim": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"bandwidth_admission_control": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"coexistence": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"auto_power_low": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ap_sniffer_ctl": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"bandwidth_capacity": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"bss_color": {
"type": "integer",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
"beacon_interval": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"channel_bonding": {
"type": "string",
"options": [
{
"value": "160MHz",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "80MHz",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "40MHz",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "20MHz",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"auto_power_high": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"iperf_server_port": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"vaps": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
},
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"short_guard_interval": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_server_ip": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"sam_cwp_test_url": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"powersave_optimize": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "tim",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "ac-vo",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "no-obss-scan",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "no-11b-rate",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "client-rate-follow",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_cwp_failure_string": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"auto_power_level": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ap_sniffer_chan": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ap_sniffer_addr": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"radio_id": {
"type": "integer",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
"sam_cwp_match_string": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"ap_sniffer_data": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_server_type": {
"type": "string",
"options": [
{
"value": "ip",
"revisions": {
"v7.0.1": True
}
},
{
"value": "fqdn",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"wids_profile": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"vap_all": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "tunnel",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "bridge",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "manual",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"rts_threshold": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"frag_threshold": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"channel": {
"type": "list",
"children": {
"chan": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
},
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"protection_mode": {
"type": "string",
"options": [
{
"value": "rtscts",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "ctsonly",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"band_5g_type": {
"type": "string",
"options": [
{
"value": "5g-full",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "5g-high",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "5g-low",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_security_type": {
"type": "string",
"options": [
{
"value": "open",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "wpa-personal",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "wpa-enterprise",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"power_value": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"ap_sniffer_mgmt_probe": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"amsdu": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"zero_wait_dfs": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
"spectrum_analysis": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.2.0": True,
"v6.2.3": True,
"v6.4.1": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v6.2.0": True,
"v6.2.3": True,
"v6.4.1": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "scan-only",
"revisions": {
"v6.4.1": True
}
}
],
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"channel_utilization": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"frequency_handoff": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_password": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"drma_sensitivity": {
"type": "string",
"options": [
{
"value": "low",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "medium",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "high",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"sam_cwp_username": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"ap_sniffer_mgmt_beacon": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ap_sniffer_mgmt_other": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_cwp_success_string": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"sam_cwp_password": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"sam_ssid": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"max_distance": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_test": {
"type": "string",
"options": [
{
"value": "ping",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "iperf",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"call_admission_control": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"power_level": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"max_clients": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"airtime_fairness": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"darrp": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_server_fqdn": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"band": {
"type": "string",
"options": [
{
"value": "802.11a",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11b",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11g",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11n",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11n-5G",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ac",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax-5G",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11n,g-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11g-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11n-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11n-5G-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ac,n-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ac-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,ac-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,ac,n-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax-5G-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,n-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,n,g-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ac-2G",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_report_intv": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"call_capacity": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_bssid": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"sam_captive_portal": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"iperf_protocol": {
"type": "string",
"options": [
{
"value": "udp",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "tcp",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"auto_power_target": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"sam_server": {
"type": "string",
"revisions": {
"v7.0.1": False,
"v7.0.0": True
}
},
"sam_username": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"mode": {
"type": "string",
"options": [
{
"value": "disabled",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "ap",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "monitor",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "sniffer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "sam",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"power_mode": {
"type": "string",
"options": [
{
"value": "dBm",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "percentage",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
},
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"radio_2": {
"type": "dict",
"children": {
"drma": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "enable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"transmit_optimize": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "power-save",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "aggr-limit",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "retry-limit",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "send-bar",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_sniffer_bufsize": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_handoff": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_bssid": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"dtim": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"bandwidth_admission_control": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"coexistence": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"auto_power_low": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_sniffer_ctl": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"bandwidth_capacity": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"bss_color": {
"type": "integer",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
"beacon_interval": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"channel_bonding": {
"type": "string",
"options": [
{
"value": "80MHz",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "40MHz",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "20MHz",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "160MHz",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"auto_power_high": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"iperf_server_port": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"vaps": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"short_guard_interval": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_server_ip": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"sam_cwp_test_url": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"powersave_optimize": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "tim",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ac-vo",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "no-obss-scan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "no-11b-rate",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "client-rate-follow",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_cwp_failure_string": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"auto_power_level": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_sniffer_chan": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_sniffer_addr": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"spectrum_analysis": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "scan-only",
"revisions": {
"v6.4.1": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_cwp_match_string": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"ap_sniffer_data": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_server_type": {
"type": "string",
"options": [
{
"value": "ip",
"revisions": {
"v7.0.1": True
}
},
{
"value": "fqdn",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"wids_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"vap_all": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "tunnel",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "bridge",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "manual",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"rts_threshold": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"frag_threshold": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"channel": {
"type": "list",
"children": {
"chan": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"protection_mode": {
"type": "string",
"options": [
{
"value": "rtscts",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ctsonly",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"band_5g_type": {
"type": "string",
"options": [
{
"value": "5g-full",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "5g-high",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "5g-low",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_security_type": {
"type": "string",
"options": [
{
"value": "open",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "wpa-personal",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "wpa-enterprise",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"power_value": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"ap_sniffer_mgmt_probe": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"amsdu": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"zero_wait_dfs": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
"channel_utilization": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"frequency_handoff": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_password": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"drma_sensitivity": {
"type": "string",
"options": [
{
"value": "low",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "medium",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "high",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"sam_cwp_username": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"ap_sniffer_mgmt_beacon": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_sniffer_mgmt_other": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_cwp_success_string": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"sam_cwp_password": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"sam_ssid": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"max_distance": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_test": {
"type": "string",
"options": [
{
"value": "ping",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "iperf",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"call_admission_control": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"power_level": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"max_clients": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"airtime_fairness": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"darrp": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_server_fqdn": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"band": {
"type": "string",
"options": [
{
"value": "802.11a",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11b",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11g",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11n",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11n-5G",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11ac",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11n,g-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11g-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11n-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11n-5G-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11ac,n-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11ac-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11ax-5G",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,ac-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,ac,n-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax-5G-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,n-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,n,g-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ac-2G",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_report_intv": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"call_capacity": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"radio_id": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False,
"v6.0.11": True
}
},
"sam_captive_portal": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"iperf_protocol": {
"type": "string",
"options": [
{
"value": "udp",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "tcp",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"auto_power_target": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"sam_server": {
"type": "string",
"revisions": {
"v7.0.1": False,
"v7.0.0": True
}
},
"sam_username": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"mode": {
"type": "string",
"options": [
{
"value": "disabled",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ap",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "monitor",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "sniffer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "sam",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"power_mode": {
"type": "string",
"options": [
{
"value": "dBm",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "percentage",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"radio_1": {
"type": "dict",
"children": {
"drma": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "enable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"transmit_optimize": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "power-save",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "aggr-limit",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "retry-limit",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "send-bar",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_sniffer_bufsize": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_handoff": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_bssid": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"dtim": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"bandwidth_admission_control": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"coexistence": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"auto_power_low": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_sniffer_ctl": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"bandwidth_capacity": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"bss_color": {
"type": "integer",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
"beacon_interval": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"channel_bonding": {
"type": "string",
"options": [
{
"value": "80MHz",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "40MHz",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "20MHz",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "160MHz",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"auto_power_high": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"iperf_server_port": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"vaps": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"short_guard_interval": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_server_ip": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"sam_cwp_test_url": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"powersave_optimize": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "tim",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ac-vo",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "no-obss-scan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "no-11b-rate",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "client-rate-follow",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_cwp_failure_string": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"auto_power_level": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_sniffer_chan": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_sniffer_addr": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"spectrum_analysis": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "scan-only",
"revisions": {
"v6.4.1": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_cwp_match_string": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"ap_sniffer_data": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_server_type": {
"type": "string",
"options": [
{
"value": "ip",
"revisions": {
"v7.0.1": True
}
},
{
"value": "fqdn",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"wids_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"vap_all": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "tunnel",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "bridge",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "manual",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"rts_threshold": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"frag_threshold": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"channel": {
"type": "list",
"children": {
"chan": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"protection_mode": {
"type": "string",
"options": [
{
"value": "rtscts",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ctsonly",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"band_5g_type": {
"type": "string",
"options": [
{
"value": "5g-full",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "5g-high",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "5g-low",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_security_type": {
"type": "string",
"options": [
{
"value": "open",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "wpa-personal",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "wpa-enterprise",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"power_value": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"ap_sniffer_mgmt_probe": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"amsdu": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"zero_wait_dfs": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
"channel_utilization": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"frequency_handoff": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_password": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"drma_sensitivity": {
"type": "string",
"options": [
{
"value": "low",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "medium",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "high",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"sam_cwp_username": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"ap_sniffer_mgmt_beacon": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_sniffer_mgmt_other": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_cwp_success_string": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"sam_cwp_password": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"sam_ssid": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"max_distance": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_test": {
"type": "string",
"options": [
{
"value": "ping",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "iperf",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"call_admission_control": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"power_level": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"max_clients": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"airtime_fairness": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"darrp": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_server_fqdn": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"band": {
"type": "string",
"options": [
{
"value": "802.11a",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11b",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11g",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11n",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11n-5G",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11ac",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11n,g-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11g-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11n-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11n-5G-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11ac,n-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11ac-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "802.11ax-5G",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,ac-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,ac,n-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax-5G-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,n-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,n,g-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ac-2G",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sam_report_intv": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"call_capacity": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"radio_id": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False,
"v6.0.11": True
}
},
"sam_captive_portal": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"iperf_protocol": {
"type": "string",
"options": [
{
"value": "udp",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "tcp",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"auto_power_target": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"sam_server": {
"type": "string",
"revisions": {
"v7.0.1": False,
"v7.0.0": True
}
},
"sam_username": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"mode": {
"type": "string",
"options": [
{
"value": "disabled",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ap",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "monitor",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "sniffer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "sam",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"power_mode": {
"type": "string",
"options": [
{
"value": "dBm",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "percentage",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"allowaccess": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "telnet",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": False,
"v6.2.3": False,
"v6.2.5": False,
"v6.2.7": False,
"v6.0.11": True
}
},
{
"value": "http",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": False,
"v6.2.3": False,
"v6.2.5": False,
"v6.2.7": False,
"v6.0.11": True
}
},
{
"value": "https",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ssh",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "snmp",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"dtls_in_kernel": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"handoff_sta_thresh": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"lan": {
"type": "dict",
"children": {
"port7_mode": {
"type": "string",
"options": [
{
"value": "offline",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "nat-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-ssid",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port3_ssid": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port_mode": {
"type": "string",
"options": [
{
"value": "offline",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "nat-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-ssid",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port_esl_ssid": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
"port4_mode": {
"type": "string",
"options": [
{
"value": "offline",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "nat-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-ssid",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port4_ssid": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port3_mode": {
"type": "string",
"options": [
{
"value": "offline",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "nat-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-ssid",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port6_mode": {
"type": "string",
"options": [
{
"value": "offline",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "nat-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-ssid",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port5_mode": {
"type": "string",
"options": [
{
"value": "offline",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "nat-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-ssid",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port6_ssid": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port_ssid": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port1_ssid": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port2_ssid": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port8_mode": {
"type": "string",
"options": [
{
"value": "offline",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "nat-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-ssid",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port5_ssid": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port_esl_mode": {
"type": "string",
"options": [
{
"value": "offline",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True
}
},
{
"value": "nat-to-wan",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True
}
},
{
"value": "bridge-to-wan",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True
}
},
{
"value": "bridge-to-ssid",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
"port7_ssid": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port8_ssid": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port1_mode": {
"type": "string",
"options": [
{
"value": "offline",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "nat-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-ssid",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port2_mode": {
"type": "string",
"options": [
{
"value": "offline",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "nat-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-wan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "bridge-to-ssid",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"dtls_policy": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "clear-text",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "dtls-enabled",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ipsec-vpn",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"apcfg_profile": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
"ext_info_enable": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"radio_4": {
"type": "dict",
"children": {
"drma": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "enable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"transmit_optimize": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "power-save",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "aggr-limit",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "retry-limit",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "send-bar",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ap_sniffer_bufsize": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ap_handoff": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"dtim": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"bandwidth_admission_control": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"coexistence": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"auto_power_low": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ap_sniffer_ctl": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"bandwidth_capacity": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"bss_color": {
"type": "integer",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
"beacon_interval": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"channel_bonding": {
"type": "string",
"options": [
{
"value": "160MHz",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "80MHz",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "40MHz",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "20MHz",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"auto_power_high": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"iperf_server_port": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"vaps": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
},
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"short_guard_interval": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_server_ip": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"sam_cwp_test_url": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"powersave_optimize": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "tim",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "ac-vo",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "no-obss-scan",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "no-11b-rate",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "client-rate-follow",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_cwp_failure_string": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"auto_power_level": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ap_sniffer_chan": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ap_sniffer_addr": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"spectrum_analysis": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.2.0": True,
"v6.4.1": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v6.2.0": True,
"v6.4.1": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "scan-only",
"revisions": {
"v6.4.1": True
}
}
],
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_cwp_match_string": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"ap_sniffer_data": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_server_type": {
"type": "string",
"options": [
{
"value": "ip",
"revisions": {
"v7.0.1": True
}
},
{
"value": "fqdn",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"wids_profile": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"vap_all": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "tunnel",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "bridge",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "manual",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"rts_threshold": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"frag_threshold": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"channel": {
"type": "list",
"children": {
"chan": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
},
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"protection_mode": {
"type": "string",
"options": [
{
"value": "rtscts",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "ctsonly",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"band_5g_type": {
"type": "string",
"options": [
{
"value": "5g-full",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "5g-high",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "5g-low",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_security_type": {
"type": "string",
"options": [
{
"value": "open",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "wpa-personal",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "wpa-enterprise",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"power_value": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"ap_sniffer_mgmt_probe": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"amsdu": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"zero_wait_dfs": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"channel_utilization": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"frequency_handoff": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_password": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"drma_sensitivity": {
"type": "string",
"options": [
{
"value": "low",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "medium",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "high",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"sam_cwp_username": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"ap_sniffer_mgmt_beacon": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ap_sniffer_mgmt_other": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_cwp_success_string": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"sam_cwp_password": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"sam_ssid": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"max_distance": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_test": {
"type": "string",
"options": [
{
"value": "ping",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "iperf",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"call_admission_control": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"power_level": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"max_clients": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"airtime_fairness": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"darrp": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_server_fqdn": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"band": {
"type": "string",
"options": [
{
"value": "802.11a",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11b",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11g",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11n",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11n-5G",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ac",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax-5G",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11n,g-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11g-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11n-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11n-5G-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ac,n-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ac-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,ac-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,ac,n-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax-5G-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,n-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax,n,g-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ax-only",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "802.11ac-2G",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_report_intv": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"call_capacity": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"sam_bssid": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"sam_captive_portal": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"iperf_protocol": {
"type": "string",
"options": [
{
"value": "udp",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "tcp",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"auto_power_target": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"sam_server": {
"type": "string",
"revisions": {
"v7.0.1": False,
"v7.0.0": True
}
},
"sam_username": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"mode": {
"type": "string",
"options": [
{
"value": "disabled",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "ap",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "monitor",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "sniffer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "sam",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"power_mode": {
"type": "string",
"options": [
{
"value": "dBm",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "percentage",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
},
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": False,
"v6.2.5": True,
"v6.2.7": True
}
},
"poe_mode": {
"type": "string",
"options": [
{
"value": "auto",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "8023af",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "8023at",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "power-adapter",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "full",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "high",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "low",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"console_login": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"ap_country": {
"type": "string",
"options": [
{
"value": "NA",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "AL",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "DZ",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "AO",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "AR",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "AM",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "AU",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "AT",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "AZ",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "BH",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "BD",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "BB",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "BY",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "BE",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "BZ",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "BO",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "BA",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "BR",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "BN",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "BG",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "KH",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "CL",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "CN",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "CO",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "CR",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "HR",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "CY",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "CZ",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "DK",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "DO",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "EC",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "EG",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "SV",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "EE",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "FI",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "FR",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "GE",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "DE",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "GR",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "GL",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "GD",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "GU",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "GT",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "HT",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "HN",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "HK",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "HU",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "IS",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "IN",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ID",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "IR",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": False,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "IE",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "IL",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "IT",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "JM",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "JO",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "KZ",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "KE",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "KP",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": False,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "KR",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "KW",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "LV",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "LB",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "LI",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "LT",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "LU",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "MO",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "MK",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "MY",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "MT",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "MX",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "MC",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "MA",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "MZ",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "MM",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "NP",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "NL",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "AN",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "AW",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "NZ",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "NO",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "OM",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "PK",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "PA",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "PG",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "PY",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "PE",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "PH",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "PL",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "PT",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "PR",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "QA",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "RO",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "RU",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "RW",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "SA",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "RS",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ME",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "SG",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "SK",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "SI",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ZA",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ES",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "LK",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "SE",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "SD",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": False,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "CH",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "SY",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": False,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "TW",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "TZ",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "TH",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "TT",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "TN",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "TR",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "AE",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "UA",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "GB",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "US",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "PS",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "UY",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "UZ",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "VE",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "VN",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "YE",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ZB",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": False,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ZW",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "JP",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "CA",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "CF",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "BS",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "BF",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "KY",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "CX",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "GH",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "GY",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "CI",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "MW",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "MV",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "FM",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "KN",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "LC",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "VC",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "SN",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "TM",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "TC",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "UG",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "VU",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
{
"value": "AF",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "AS",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "BJ",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "BM",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "BT",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "BW",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "CM",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "TD",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "CG",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "CD",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "DM",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "ET",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "GF",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "PF",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "FO",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "FJ",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "GI",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "GP",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "IQ",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "IM",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "LA",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "LS",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "LY",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "MG",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "ML",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "MH",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "MQ",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "MR",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "MU",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "YT",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "MD",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "NI",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "NE",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "MP",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "PW",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "RE",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "BL",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "MF",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "PM",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "SL",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "SR",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "TG",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "VI",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "WF",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "ZM",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "--",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"energy_efficient_ethernet": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"handoff_rssi": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ap_handoff": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "disable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
"max_clients": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"login_passwd": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"handoff_roaming": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"split_tunneling_acl_local_ap_subnet": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"esl_ses_dongle": {
"type": "dict",
"children": {
"output_power": {
"type": "string",
"options": [
{
"value": "a",
"revisions": {
"v7.0.1": True
}
},
{
"value": "b",
"revisions": {
"v7.0.1": True
}
},
{
"value": "c",
"revisions": {
"v7.0.1": True
}
},
{
"value": "d",
"revisions": {
"v7.0.1": True
}
},
{
"value": "e",
"revisions": {
"v7.0.1": True
}
},
{
"value": "f",
"revisions": {
"v7.0.1": True
}
},
{
"value": "g",
"revisions": {
"v7.0.1": True
}
},
{
"value": "h",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"apc_fqdn": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"tls_fqdn_verification": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"coex_level": {
"type": "string",
"options": [
{
"value": "none",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"tls_cert_verification": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"compliance_level": {
"type": "string",
"options": [
{
"value": "compliance-level-2",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"apc_addr_type": {
"type": "string",
"options": [
{
"value": "fqdn",
"revisions": {
"v7.0.1": True
}
},
{
"value": "ip",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"esl_channel": {
"type": "string",
"options": [
{
"value": "-1",
"revisions": {
"v7.0.1": True
}
},
{
"value": "0",
"revisions": {
"v7.0.1": True
}
},
{
"value": "1",
"revisions": {
"v7.0.1": True
}
},
{
"value": "2",
"revisions": {
"v7.0.1": True
}
},
{
"value": "3",
"revisions": {
"v7.0.1": True
}
},
{
"value": "4",
"revisions": {
"v7.0.1": True
}
},
{
"value": "5",
"revisions": {
"v7.0.1": True
}
},
{
"value": "6",
"revisions": {
"v7.0.1": True
}
},
{
"value": "7",
"revisions": {
"v7.0.1": True
}
},
{
"value": "8",
"revisions": {
"v7.0.1": True
}
},
{
"value": "9",
"revisions": {
"v7.0.1": True
}
},
{
"value": "10",
"revisions": {
"v7.0.1": True
}
},
{
"value": "127",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"apc_ip": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"apc_port": {
"type": "integer",
"revisions": {
"v7.0.1": True
}
},
"scd_enable": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
}
},
"revisions": {
"v7.0.1": True
}
},
"led_schedules": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"wan_port_mode": {
"type": "string",
"options": [
{
"value": "wan-lan",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wan-only",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"led_state": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"indoor_outdoor_deployment": {
"type": "string",
"options": [
{
"value": "platform-determined",
"revisions": {
"v7.0.1": True
}
},
{
"value": "outdoor",
"revisions": {
"v7.0.1": True
}
},
{
"value": "indoor",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
def main():
    """Entry point for the wireless_controller_wtp_profile Ansible module.

    Builds the Ansible argument spec from the versioned schema, opens the
    httpapi connection, runs the configuration task and reports the result
    (including any FortiOS version-compatibility warnings).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'name'
    fields = {
        "access_token": {"required": False, "type": "str", "no_log": True},
        # Must be the Ansible type NAME "bool", not the builtin callable:
        # the builtin bool would treat any non-empty string (e.g. "no") as
        # True, while "bool" lets Ansible coerce yes/no/true/false properly.
        "enable_log": {"required": False, "type": "bool"},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "member_path": {"required": False, "type": "str"},
        "member_state": {
            "type": "str",
            "required": False,
            "choices": ["present", "absent"]
        },
        "state": {"required": True, "type": "str",
                  "choices": ["present", "absent"]},
        "wireless_controller_wtp_profile": {
            "required": False, "type": "dict", "default": None,
            "options": {}
        }
    }
    # Copy every schema-derived attribute into the module spec; the mkey
    # attribute ('name') identifies the object and is therefore required.
    for attribute_name in module_spec['options']:
        fields["wireless_controller_wtp_profile"]['options'][attribute_name] = module_spec['options'][attribute_name]
        if mkeyname and mkeyname == attribute_name:
            fields["wireless_controller_wtp_profile"]['options'][attribute_name]['required'] = True

    check_legacy_fortiosapi()
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=True)

    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if 'access_token' in module.params:
            connection.set_option('access_token', module.params['access_token'])
        # Logging defaults to off when the caller did not specify it.
        if 'enable_log' in module.params:
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, "wireless_controller_wtp_profile")
        is_error, has_changed, result = fortios_wireless_controller(module.params, fos, module.check_mode)
    else:
        # Modules of this family only work over the httpapi connection.
        module.fail_json(**FAIL_SOCKET_MSG)

    if versions_check_result and versions_check_result['matched'] is False:
        module.warn("Ansible has detected version mismatch between FortiOS system and your playbook, see more details by specifying option -vvv")

    if not is_error:
        if versions_check_result and versions_check_result['matched'] is False:
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result)
        else:
            module.exit_json(changed=has_changed, meta=result)
    else:
        if versions_check_result and versions_check_result['matched'] is False:
            module.fail_json(msg="Error in repo", version_check_warning=versions_check_result, meta=result)
        else:
            module.fail_json(msg="Error in repo", meta=result)


if __name__ == '__main__':
    main()
| 39.183357
| 158
| 0.20926
| 55,426
| 777,868
| 2.910638
| 0.020442
| 0.264621
| 0.145446
| 0.058763
| 0.912667
| 0.899365
| 0.889193
| 0.883081
| 0.879727
| 0.876554
| 0
| 0.133999
| 0.680966
| 777,868
| 19,851
| 159
| 39.185331
| 0.51607
| 0.001127
| 0
| 0.77221
| 0
| 0.004045
| 0.276358
| 0.004841
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000405
| false
| 0.001669
| 0.000506
| 0.000051
| 0.001719
| 0.000051
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d0a720f3631a531060eeed8152480eccad538e8d
| 1,397
|
py
|
Python
|
game/consumeableRoom.py
|
kim-tran/text-based-atk
|
eb33a39f1fcb70729d9c3a5a147923c5c95a1fe8
|
[
"MIT"
] | null | null | null |
game/consumeableRoom.py
|
kim-tran/text-based-atk
|
eb33a39f1fcb70729d9c3a5a147923c5c95a1fe8
|
[
"MIT"
] | null | null | null |
game/consumeableRoom.py
|
kim-tran/text-based-atk
|
eb33a39f1fcb70729d9c3a5a147923c5c95a1fe8
|
[
"MIT"
] | null | null | null |
import consumable
from tiles import LootRoom
class FindCookieRoom(LootRoom):
    """Loot room that stocks a Cookie consumable for the player to pick up."""

    def __init__(self, x, y):
        # The file imports ``consumable`` (there is no ``items`` module in
        # scope), so the item class must come from it — TODO confirm
        # ``consumable`` defines Cookie.
        super().__init__(x, y, consumable.Cookie())

    def intro_text(self):
        # The original "It/'s" was a stray escape attempt; no escaping is
        # needed for an apostrophe inside a triple-quoted string.
        return """
You notice something shiny in the corner.
It's a {}! You pick it up.
""".format(self.name)
class FindBreadRoom(LootRoom):
    """Loot room that stocks a Bread consumable for the player to pick up."""

    def __init__(self, x, y):
        # The file imports ``consumable`` (there is no ``items`` module in
        # scope), so the item class must come from it — TODO confirm
        # ``consumable`` defines Bread.
        super().__init__(x, y, consumable.Bread())

    def intro_text(self):
        # The original "It/'s" was a stray escape attempt; no escaping is
        # needed for an apostrophe inside a triple-quoted string.
        return """
You notice something shiny in the corner.
It's {}! You pick it up.
""".format(self.name)
class FindWMYogurtRoom(LootRoom):
    """Loot room that stocks a WMYogurt consumable for the player to pick up."""

    def __init__(self, x, y):
        # The file imports ``consumable`` (there is no ``items`` module in
        # scope), so the item class must come from it — TODO confirm
        # ``consumable`` defines WMYogurt.
        super().__init__(x, y, consumable.WMYogurt())

    def intro_text(self):
        # The original "It/'s" was a stray escape attempt; no escaping is
        # needed for an apostrophe inside a triple-quoted string.
        return """
You notice something shiny in the corner.
It's {}! You pick it up.
""".format(self.name)
class FindFFYogurtRoom(LootRoom):
    """Loot room that stocks an FFYogurt consumable for the player to pick up."""

    def __init__(self, x, y):
        # The file imports ``consumable`` (there is no ``items`` module in
        # scope), so the item class must come from it — TODO confirm
        # ``consumable`` defines FFYogurt.
        super().__init__(x, y, consumable.FFYogurt())

    def intro_text(self):
        # The original "It/'s" was a stray escape attempt; no escaping is
        # needed for an apostrophe inside a triple-quoted string.
        return """
You notice something shiny in the corner.
It's {}! You pick it up.
""".format(self.name)
class FindJellyRoom(LootRoom):
    """Loot room that stocks a Jelly consumable for the player to pick up."""

    def __init__(self, x, y):
        # The file imports ``consumable`` (there is no ``items`` module in
        # scope), so the item class must come from it — TODO confirm
        # ``consumable`` defines Jelly.
        super().__init__(x, y, consumable.Jelly())

    def intro_text(self):
        # The original "It/'s" was a stray escape attempt; no escaping is
        # needed for an apostrophe inside a triple-quoted string.
        return """
You notice something shiny in the corner.
It's {}! You pick it up.
""".format(self.name)
| 28.510204
| 49
| 0.586256
| 182
| 1,397
| 4.252747
| 0.21978
| 0.02584
| 0.096899
| 0.122739
| 0.807494
| 0.807494
| 0.807494
| 0.807494
| 0.768734
| 0.768734
| 0
| 0
| 0.277738
| 1,397
| 48
| 50
| 29.104167
| 0.767096
| 0
| 0
| 0.690476
| 0
| 0
| 0.334288
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.238095
| false
| 0
| 0.047619
| 0.119048
| 0.52381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
ef75b9fb6b7f94a2cd882e0fe902dc01131e84c9
| 156
|
py
|
Python
|
001.py
|
tangcg/githelper
|
1686129ba60ebaede4cac90ac8f3a94e2ad10fa2
|
[
"Apache-2.0"
] | null | null | null |
001.py
|
tangcg/githelper
|
1686129ba60ebaede4cac90ac8f3a94e2ad10fa2
|
[
"Apache-2.0"
] | 1
|
2019-10-22T09:13:24.000Z
|
2019-10-22T09:21:31.000Z
|
001.py
|
tangcg/githelper
|
1686129ba60ebaede4cac90ac8f3a94e2ad10fa2
|
[
"Apache-2.0"
] | 1
|
2019-10-22T08:54:31.000Z
|
2019-10-22T08:54:31.000Z
|
# Demo output exercising a fork-based git workflow; messages are printed
# in their original order and wording.
for message in (
    'this repo is belong to nbnitboy',
    'this line is added by tangcg !',
    '删除原始仓库,添加forked仓库后,提交代码 !',
    'this line is added by tangcg',
):
    print(message)
| 22.285714
| 40
| 0.724359
| 25
| 156
| 4.52
| 0.56
| 0.238938
| 0.230089
| 0.265487
| 0.495575
| 0.495575
| 0.495575
| 0
| 0
| 0
| 0
| 0
| 0.147436
| 156
| 6
| 41
| 26
| 0.849624
| 0
| 0
| 0
| 0
| 0
| 0.730769
| 0.147436
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
ef95051a184b53b9e70e65905bb6fea609ab7062
| 26,161
|
py
|
Python
|
selectinf/randomized/sandbox/convenience.py
|
TianXie1999/selective-inference
|
ca02bbd84af5f5597944c75bde8337db9c69066a
|
[
"BSD-3-Clause"
] | 51
|
2016-03-31T16:34:15.000Z
|
2022-01-16T04:32:58.000Z
|
selectinf/randomized/sandbox/convenience.py
|
TianXie1999/selective-inference
|
ca02bbd84af5f5597944c75bde8337db9c69066a
|
[
"BSD-3-Clause"
] | 11
|
2016-04-07T00:19:58.000Z
|
2021-10-03T18:31:14.000Z
|
selectinf/randomized/sandbox/convenience.py
|
TianXie1999/selective-inference
|
ca02bbd84af5f5597944c75bde8337db9c69066a
|
[
"BSD-3-Clause"
] | 14
|
2015-10-28T17:29:05.000Z
|
2021-08-16T21:04:30.000Z
|
"""
Classes encapsulating some common workflows in randomized setting
"""
from copy import copy
import numpy as np
import regreg.api as rr
from .glm import (glm_greedy_step,
glm_threshold_score,
pairs_bootstrap_glm)
from .randomization import randomization
from .query import multiple_queries
from .lasso import lasso
class step(lasso):
    r"""
    A class for maximizing some coordinates of the
    randomized score of a GLM. The problem we are
    solving is

    .. math::

        \text{minimize}_{\eta} (\nabla \ell(\bar{\beta}_E) - \omega)^T\eta

    subject to $\|\eta_g\|_2/w_g \leq 1$ where $w_g$ are group weights.

    The set of variables $E$ are variables we have partially maximized over
    and $\bar{\beta}_E$ should be viewed as padded out with zeros
    over all variables in $E^c$.
    """

    def __init__(self,
                 loglike,
                 feature_weights,
                 candidate,
                 randomizer_scale,
                 active=None,
                 randomizer='gaussian',
                 parametric_cov_estimator=False):
        r"""
        Create a new post-selection object for the stepwise problem.

        Parameters
        ----------
        loglike : `regreg.smooth.glm.glm`
            A (negative) log-likelihood as implemented in `regreg`.

        feature_weights : np.ndarray
            Feature weights for L-1 penalty. If a float,
            it is broadcast to all features.

        candidate : np.bool
            Which groups of variables are candidates
            for inclusion in this step.

        randomizer_scale : float
            Scale for IID components of randomization.

        active : np.bool (optional)
            Which groups of variables make up $E$, the
            set of variables we partially minimize over.

        randomizer : str (optional)
            One of ['laplace', 'logistic', 'gaussian']

        Raises
        ------
        ValueError
            If `randomizer` is not one of the supported names.
        """
        self.active = active
        self.candidate = candidate
        self.loglike = loglike
        self.nfeature = p = loglike.shape[0]

        # A scalar weight is broadcast to every feature.
        if np.asarray(feature_weights).shape == ():
            feature_weights = np.ones(loglike.shape) * feature_weights
        self.feature_weights = np.asarray(feature_weights)

        self.parametric_cov_estimator = parametric_cov_estimator

        # Only the candidate coordinates receive randomization.
        nrandom = candidate.sum()
        if randomizer == 'laplace':
            self.randomizer = randomization.laplace((nrandom,), scale=randomizer_scale)
        elif randomizer == 'gaussian':
            self.randomizer = randomization.isotropic_gaussian((nrandom,), randomizer_scale)
        elif randomizer == 'logistic':
            self.randomizer = randomization.logistic((nrandom,), scale=randomizer_scale)
        else:
            # Previously an unrecognized name silently left the instance
            # without a randomizer; fail fast instead.
            raise ValueError("randomizer must be one of ['laplace', 'logistic', 'gaussian']")

        self.penalty = rr.group_lasso(np.arange(p),
                                      weights=dict(zip(np.arange(p), self.feature_weights)),
                                      lagrange=1.)

    def fit(self, views=None):
        """
        Find the maximizing group.

        Parameters
        ----------
        views : list (optional)
            Other views of the data, e.g. cross-validation.
            Defaults to no additional views. (Changed from a mutable
            default argument; the list is still copied before use.)

        Returns
        -------
        maximizing_group :
            The group selected by the greedy step, as recorded in
            ``selection_variable['maximizing_group']``.
        """
        if views is None:
            views = []
        self._view = glm_greedy_step(self.loglike,
                                     self.penalty,
                                     self.active,
                                     self.candidate,
                                     self.randomizer)
        self._view.solve()
        # Copy so the caller's list is never mutated.
        views = copy(views)
        views.append(self._view)
        self._queries = multiple_queries(views)
        self._queries.solve()
        self.maximizing_group = self._view.selection_variable['maximizing_group']
        return self.maximizing_group

    def decompose_subgradient(self,
                              conditioning_groups=None,
                              marginalizing_groups=None):
        """
        Marginalize over some if candidate part of subgradient
        if applicable.

        Parameters
        ----------
        conditioning_groups : np.bool
            Which groups' subgradients should we condition on.

        marginalizing_groups : np.bool
            Which groups' subgradients should we marginalize over.

        Returns
        -------
        None

        Raises
        ------
        NotImplementedError
            Always; subgradient decomposition is not available for steps.
        """
        raise NotImplementedError

    @staticmethod
    def gaussian(X,
                 Y,
                 feature_weights,
                 candidate=None,
                 active=None,
                 randomizer_scale=None,
                 parametric_cov_estimator=False,
                 randomizer='gaussian'):
        r"""
        Take a step with a Gaussian loglikelihood.

        Parameters
        ----------
        X : ndarray
            Shape (n,p) -- the design matrix.

        Y : ndarray
            Shape (n,) -- the response.

        feature_weights: [float, sequence]
            Penalty weights. An intercept, or other unpenalized
            features are handled by setting those entries of
            `feature_weights` to 0. If `feature_weights` is
            a float, then all parameters are penalized equally.

        candidate : np.bool (optional)
            Which groups of variables are candidates
            for inclusion in this step. Defaults to ~active.

        active : np.bool (optional)
            Which groups of variables make up $E$, the
            set of variables we partially minimize over.
            Defaults to `np.zeros(p, bool)`.

        randomizer_scale : float
            Scale for IID components of randomizer.

        randomizer : str
            One of ['laplace', 'logistic', 'gaussian']

        Returns
        -------
        L : `selection.randomized.convenience.step`
        """
        loglike = rr.glm.gaussian(X, Y)
        n, p = X.shape
        if active is None:
            # np.bool was removed from NumPy; the builtin bool is the
            # documented equivalent dtype.
            active = np.zeros(p, bool)
        if candidate is None:
            candidate = ~active
        if randomizer_scale is None:
            mean_diag = np.mean((X**2).sum(0))
            randomizer_scale = np.sqrt(mean_diag) * 0.5 * np.std(Y)

        return step(loglike,
                    feature_weights,
                    candidate,
                    randomizer_scale,
                    active=active,
                    randomizer=randomizer,
                    parametric_cov_estimator=parametric_cov_estimator)

    @staticmethod
    def logistic(X,
                 successes,
                 feature_weights,
                 active=None,
                 candidate=None,
                 trials=None,
                 parametric_cov_estimator=False,
                 randomizer_scale=None,
                 randomizer='gaussian'):
        r"""
        Take a step with a logistic loglikelihood.

        Parameters
        ----------
        X : ndarray
            Shape (n,p) -- the design matrix.

        successes : ndarray
            Shape (n,) -- response vector. An integer number of successes.
            For data that is proportions, multiply the proportions
            by the number of trials first.

        feature_weights: [float, sequence]
            Penalty weights. An intercept, or other unpenalized
            features are handled by setting those entries of
            `feature_weights` to 0. If `feature_weights` is
            a float, then all parameters are penalized equally.

        candidate : np.bool (optional)
            Which groups of variables are candidates
            for inclusion in this step. Defaults to ~active.

        active : np.bool (optional)
            Which groups of variables make up $E$, the
            set of variables we partially minimize over.
            Defaults to `np.zeros(p, bool)`.

        trials : ndarray (optional)
            Number of trials per response, defaults to
            ones the same shape as Y.

        randomizer_scale : float
            Scale for IID components of randomizer.

        randomizer : str
            One of ['laplace', 'logistic', 'gaussian']

        Returns
        -------
        L : `selection.randomized.convenience.step`
        """
        n, p = X.shape
        loglike = rr.glm.logistic(X, successes, trials=trials)
        if active is None:
            active = np.zeros(p, bool)
        if candidate is None:
            candidate = ~active
        if randomizer_scale is None:
            mean_diag = np.mean((X**2).sum(0))
            randomizer_scale = np.sqrt(mean_diag) * 0.5

        return step(loglike,
                    feature_weights,
                    candidate,
                    randomizer_scale,
                    active=active,
                    # Previously omitted, silently ignoring the caller's
                    # choice of randomizer; all sibling constructors pass it.
                    randomizer=randomizer,
                    parametric_cov_estimator=parametric_cov_estimator)

    @staticmethod
    def coxph(X,
              times,
              status,
              feature_weights,
              candidate=None,
              active=None,
              parametric_cov_estimator=False,
              randomizer_scale=None,
              randomizer='gaussian'):
        r"""
        Take a step with a Cox partial loglikelihood.

        Uses Efron's tie breaking method.

        Parameters
        ----------
        X : ndarray
            Shape (n,p) -- the design matrix.

        times : ndarray
            Shape (n,) -- the survival times.

        status : ndarray
            Shape (n,) -- the censoring status.

        feature_weights: [float, sequence]
            Penalty weights. An intercept, or other unpenalized
            features are handled by setting those entries of
            `feature_weights` to 0. If `feature_weights` is
            a float, then all parameters are penalized equally.

        candidate : np.bool (optional)
            Which groups of variables are candidates
            for inclusion in this step. Defaults to ~active.

        active : np.bool (optional)
            Which groups of variables make up $E$, the
            set of variables we partially minimize over.
            Defaults to `np.zeros(p, bool)`.

        randomizer_scale : float
            Scale for IID components of randomizer.

        randomizer : str
            One of ['laplace', 'logistic', 'gaussian']

        Returns
        -------
        L : `selection.randomized.convenience.lasso`
        """
        n, p = X.shape
        # NOTE(review): the original called undefined ``coxph_obj``; the
        # regreg Cox partial-likelihood constructor is ``rr.glm.cox`` --
        # confirm the argument order (times, status) against regreg.
        loglike = rr.glm.cox(X, times, status)
        if active is None:
            active = np.zeros(p, bool)
        if candidate is None:
            candidate = ~active
        if randomizer_scale is None:
            randomizer_scale = 1. / np.sqrt(n)

        return step(loglike,
                    feature_weights,
                    candidate,
                    randomizer_scale,
                    active=active,
                    randomizer=randomizer,
                    parametric_cov_estimator=parametric_cov_estimator)

    @staticmethod
    def poisson(X,
                counts,
                feature_weights,
                candidate=None,
                active=None,
                parametric_cov_estimator=False,
                randomizer_scale=None,
                randomizer='gaussian'):
        r"""
        Take a step with a Poisson loglikelihood.

        Parameters
        ----------
        X : ndarray
            Shape (n,p) -- the design matrix.

        counts : ndarray
            Shape (n,) -- the response.

        feature_weights: [float, sequence]
            Penalty weights. An intercept, or other unpenalized
            features are handled by setting those entries of
            `feature_weights` to 0. If `feature_weights` is
            a float, then all parameters are penalized equally.

        candidate : np.bool (optional)
            Which groups of variables are candidates
            for inclusion in this step. Defaults to ~active.

        active : np.bool (optional)
            Which groups of variables make up $E$, the
            set of variables we partially minimize over.
            Defaults to `np.zeros(p, bool)`.

        randomizer_scale : float
            Scale for IID components of randomizer.

        randomizer : str
            One of ['laplace', 'logistic', 'gaussian']

        Returns
        -------
        L : `selection.randomized.convenience.step`
        """
        n, p = X.shape
        loglike = rr.glm.poisson(X, counts)

        # scale for randomizer seems kind of meaningless here...
        if active is None:
            active = np.zeros(p, bool)
        if candidate is None:
            candidate = ~active
        mean_diag = np.mean((X**2).sum(0))
        if randomizer_scale is None:
            randomizer_scale = np.sqrt(mean_diag) * 0.5 * np.std(counts)

        return step(loglike,
                    feature_weights,
                    candidate,
                    randomizer_scale,
                    active=active,
                    randomizer=randomizer,
                    parametric_cov_estimator=parametric_cov_estimator)
class threshold(lasso):

    r"""
    A class for thresholding some coordinates of the
    randomized score of a GLM. The problem we are
    solving is

    .. math::

        \text{minimize}_{\eta: |\eta_i| \leq \tau_i} \frac{1}{2}\|\nabla \ell(\bar{\beta}_E) + \omega - \eta\|^2_2

    The set of variables $E$ are variables we have partially maximized over
    and $\bar{\beta}_E$ should be viewed as padded out with zeros
    over all variables in $E^c$.
    """

    def __init__(self,
                 loglike,
                 threshold_value,
                 candidate,
                 randomizer_scale,
                 active=None,
                 randomizer='gaussian',
                 parametric_cov_estimator=False):
        r"""
        Create a new post-selection object for the thresholding problem.

        Parameters
        ----------
        loglike : `regreg.smooth.glm.glm`
            A (negative) log-likelihood as implemented in `regreg`.
        threshold_value : [float, sequence]
            Thresholding for each feature. A scalar is treated
            as a multiple of `np.ones`.
        candidate : np.bool
            Which groups of variables are candidates
            for thresholding.
        randomizer_scale : float
            Scale for IID components of randomization.
        active : np.bool (optional)
            Which groups of variables make up $E$, the
            set of variables we partially minimize over.
        randomizer : str (optional)
            One of ['laplace', 'logistic', 'gaussian']
        """
        self.active = active
        self.candidate = candidate
        self.loglike = loglike
        self.nfeature = p = self.loglike.shape[0]

        # Broadcast a scalar threshold to one value per feature before
        # restricting to the candidate set.  (The original code built the
        # broadcast array but then indexed the raw scalar, which raises
        # IndexError for a 0-d array.)
        if np.asarray(threshold_value).shape == ():
            threshold_value = np.ones(loglike.shape) * threshold_value
        self.threshold_value = np.asarray(threshold_value)[self.candidate]

        self.parametric_cov_estimator = parametric_cov_estimator

        nrandom = candidate.sum()
        if randomizer == 'laplace':
            self.randomizer = randomization.laplace((nrandom,), scale=randomizer_scale)
        elif randomizer == 'gaussian':
            self.randomizer = randomization.isotropic_gaussian((nrandom,), randomizer_scale)
        elif randomizer == 'logistic':
            self.randomizer = randomization.logistic((nrandom,), scale=randomizer_scale)
        else:
            # Previously an unrecognized name silently left
            # `self.randomizer` unset, failing later with an
            # opaque AttributeError.
            raise ValueError("randomizer must be one of ['laplace', 'logistic', 'gaussian']")

    def fit(self, views=None):
        """
        Solve the randomized thresholding problem.

        Parameters
        ----------
        views : list (optional)
            Other views of the data, e.g. cross-validation.
            Defaults to no extra views.  (A mutable default
            argument was replaced by `None`.)

        Returns
        -------
        boundary : np.bool
            The `boundary_set` selection variable of the
            thresholded view.
        """
        self._view = glm_threshold_score(self.loglike,
                                         self.threshold_value,
                                         self.randomizer,
                                         self.active,
                                         self.candidate)
        self._view.solve()

        # Work on a copy so the caller's list is never mutated.
        views = list(views) if views is not None else []
        views.append(self._view)

        self._queries = multiple_queries(views)
        self._queries.solve()

        self.boundary = self._view.selection_variable['boundary_set']
        return self.boundary

    def decompose_subgradient(self,
                              conditioning_groups=None,
                              marginalizing_groups=None):
        """
        Marginalize over part of the subgradient if applicable.

        Parameters
        ----------
        conditioning_groups : np.bool
            Which groups' subgradients should we condition on.
        marginalizing_groups : np.bool
            Which groups' subgradients should we marginalize over.

        Raises
        ------
        NotImplementedError
            Not available for the thresholding problem.
        """
        raise NotImplementedError

    @staticmethod
    def gaussian(X,
                 Y,
                 threshold_value,
                 candidate=None,
                 active=None,
                 parametric_cov_estimator=False,
                 randomizer_scale=None,
                 randomizer='gaussian'):
        r"""
        Threshold with a Gaussian loglikelihood.

        Parameters
        ----------
        X : ndarray
            Shape (n,p) -- the design matrix.
        Y : ndarray
            Shape (n,) -- the response.
        threshold_value : [float, sequence]
            Thresholding for each feature; a float is applied
            to all coordinates equally.
        candidate : np.bool (optional)
            Candidate variables; defaults to ~active.
        active : np.bool (optional)
            Variables making up $E$; defaults to `np.zeros(p, bool)`.
        parametric_cov_estimator : bool
            Use a parametric covariance estimator?
        randomizer_scale : float
            Scale for IID components of randomizer.
        randomizer : str
            One of ['laplace', 'logistic', 'gaussian']

        Returns
        -------
        L : `selection.randomized.convenience.threshold`
        """
        loglike = rr.glm.gaussian(X, Y)
        n, p = X.shape

        if active is None:
            active = np.zeros(p, bool)
        if candidate is None:
            candidate = ~active

        if randomizer_scale is None:
            mean_diag = np.mean((X**2).sum(0))
            randomizer_scale = np.sqrt(mean_diag) * 0.5 * np.std(Y)

        return threshold(loglike,
                         threshold_value,
                         candidate,
                         randomizer_scale,
                         active=active,
                         randomizer=randomizer,
                         parametric_cov_estimator=parametric_cov_estimator)

    @staticmethod
    def logistic(X,
                 successes,
                 threshold_value,
                 active=None,
                 candidate=None,
                 trials=None,
                 parametric_cov_estimator=False,
                 randomizer_scale=None,
                 randomizer='gaussian'):
        r"""
        Threshold with a logistic loglikelihood.

        Parameters
        ----------
        X : ndarray
            Shape (n,p) -- the design matrix.
        successes : ndarray
            Shape (n,) -- response vector. An integer number of successes.
            For data that is proportions, multiply the proportions
            by the number of trials first.
        threshold_value : [float, sequence]
            Thresholding for each feature; a float is applied
            to all coordinates equally.
        active : np.bool (optional)
            Variables making up $E$; defaults to `np.zeros(p, bool)`.
        candidate : np.bool (optional)
            Candidate variables; defaults to ~active.
        trials : ndarray (optional)
            Number of trials per response, defaults to
            ones the same shape as `successes`.
        parametric_cov_estimator : bool
            Use a parametric covariance estimator?
        randomizer_scale : float
            Scale for IID components of randomizer.
        randomizer : str
            One of ['laplace', 'logistic', 'gaussian']

        Returns
        -------
        L : `selection.randomized.convenience.threshold`
        """
        n, p = X.shape

        loglike = rr.glm.logistic(X, successes, trials=trials)

        if active is None:
            active = np.zeros(p, bool)
        if candidate is None:
            candidate = ~active

        if randomizer_scale is None:
            mean_diag = np.mean((X**2).sum(0))
            randomizer_scale = np.sqrt(mean_diag) * 0.5

        return threshold(loglike,
                         threshold_value,
                         candidate,
                         randomizer_scale,
                         active=active,
                         # `randomizer` was previously dropped here,
                         # unlike every sibling constructor.
                         randomizer=randomizer,
                         parametric_cov_estimator=parametric_cov_estimator)

    @staticmethod
    def coxph(X,
              times,
              status,
              threshold_value,
              candidate=None,
              active=None,
              parametric_cov_estimator=False,
              randomizer_scale=None,
              randomizer='gaussian'):
        r"""
        Threshold with a Cox partial loglikelihood.
        Uses Efron's tie breaking method.

        Parameters
        ----------
        X : ndarray
            Shape (n,p) -- the design matrix.
        times : ndarray
            Shape (n,) -- the survival times.
        status : ndarray
            Shape (n,) -- the censoring status.
        threshold_value : [float, sequence]
            Thresholding for each feature; a float is applied
            to all coordinates equally.
        candidate : np.bool (optional)
            Candidate variables; defaults to ~active.
        active : np.bool (optional)
            Variables making up $E$; defaults to `np.zeros(p, bool)`.
        parametric_cov_estimator : bool
            Use a parametric covariance estimator?
        randomizer_scale : float
            Scale for IID components of randomizer.
        randomizer : str
            One of ['laplace', 'logistic', 'gaussian']

        Returns
        -------
        L : `selection.randomized.convenience.threshold`
        """
        n, p = X.shape

        loglike = coxph_obj(X, times, status)

        if active is None:
            active = np.zeros(p, bool)
        if candidate is None:
            candidate = ~active

        if randomizer_scale is None:
            randomizer_scale = 1. / np.sqrt(n)

        return threshold(loglike,
                         threshold_value,
                         candidate,
                         randomizer_scale,
                         active=active,
                         randomizer=randomizer,
                         parametric_cov_estimator=parametric_cov_estimator)

    @staticmethod
    def poisson(X,
                counts,
                threshold_value,
                candidate=None,
                active=None,
                parametric_cov_estimator=False,
                randomizer_scale=None,
                randomizer='gaussian'):
        r"""
        Threshold with a Poisson loglikelihood.

        Parameters
        ----------
        X : ndarray
            Shape (n,p) -- the design matrix.
        counts : ndarray
            Shape (n,) -- the response.
        threshold_value : [float, sequence]
            Thresholding for each feature; a float is applied
            to all coordinates equally.
        candidate : np.bool (optional)
            Candidate variables; defaults to ~active.
        active : np.bool (optional)
            Variables making up $E$; defaults to `np.zeros(p, bool)`.
        parametric_cov_estimator : bool
            Use a parametric covariance estimator?
        randomizer_scale : float
            Scale for IID components of randomizer.
        randomizer : str
            One of ['laplace', 'logistic', 'gaussian']

        Returns
        -------
        L : `selection.randomized.convenience.threshold`
        """
        n, p = X.shape
        loglike = rr.glm.poisson(X, counts)

        if active is None:
            active = np.zeros(p, bool)
        if candidate is None:
            candidate = ~active

        # scale for randomizer seems kind of meaningless here...
        if randomizer_scale is None:
            mean_diag = np.mean((X**2).sum(0))
            randomizer_scale = np.sqrt(mean_diag) * 0.5 * np.std(counts)

        return threshold(loglike,
                         threshold_value,
                         candidate,
                         randomizer_scale,
                         active=active,
                         randomizer=randomizer,
                         parametric_cov_estimator=parametric_cov_estimator)
| 30.035591
| 114
| 0.54092
| 2,656
| 26,161
| 5.236822
| 0.092997
| 0.053922
| 0.047451
| 0.031634
| 0.917104
| 0.911352
| 0.899993
| 0.89798
| 0.89798
| 0.89798
| 0
| 0.002778
| 0.380719
| 26,161
| 870
| 115
| 30.070115
| 0.85575
| 0.438706
| 0
| 0.85342
| 0
| 0
| 0.012979
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045603
| false
| 0
| 0.022801
| 0
| 0.107492
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
efd2a247fae8587bac2d4eb9a6d4264e72cf6f5d
| 39
|
py
|
Python
|
tracklib/io/__init__.py
|
SGrosse-Holz/tracklib
|
e0b88e3959db2ce65869d8292ce5792f4c77c7a4
|
[
"MIT"
] | 1
|
2022-01-30T15:10:51.000Z
|
2022-01-30T15:10:51.000Z
|
tracklib/io/__init__.py
|
SGrosse-Holz/tracklib
|
e0b88e3959db2ce65869d8292ce5792f4c77c7a4
|
[
"MIT"
] | null | null | null |
tracklib/io/__init__.py
|
SGrosse-Holz/tracklib
|
e0b88e3959db2ce65869d8292ce5792f4c77c7a4
|
[
"MIT"
] | null | null | null |
from . import load
from . import write
| 13
| 19
| 0.74359
| 6
| 39
| 4.833333
| 0.666667
| 0.689655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205128
| 39
| 2
| 20
| 19.5
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
323c5799f0ba54cbf2cf6e2913cac55568cb9079
| 8,061
|
py
|
Python
|
datasets/weedcluster.py
|
carolinepotts/DeepLabV3Plus-Pytorch
|
208ae68a5d0265be5aafa39c2e27bd9beb380346
|
[
"MIT"
] | null | null | null |
datasets/weedcluster.py
|
carolinepotts/DeepLabV3Plus-Pytorch
|
208ae68a5d0265be5aafa39c2e27bd9beb380346
|
[
"MIT"
] | null | null | null |
datasets/weedcluster.py
|
carolinepotts/DeepLabV3Plus-Pytorch
|
208ae68a5d0265be5aafa39c2e27bd9beb380346
|
[
"MIT"
] | null | null | null |
import os
import torch
from skimage import io, transform
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms, utils
from PIL import Image
import numpy as np
class WeedClusterDataset(Dataset):
    """weed_cluster class only dataset. Use when only detecting weed clusters to minimize memory usage."""

    def __init__(self, root, split='train'):
        """
        Args:
            root (string): Directory that includes directory of images and directory of labels. Should end in '/'
            split (string): One of 'train', 'test' or 'val'.

        Raises:
            ValueError: If `split` is not one of the three supported splits.
        """
        if split not in ['train', 'test', 'val']:
            raise ValueError('Invalid split for mode! Please use split="train", split="test"'
                             ' or split="val"')
        self.split = split
        self.root = root
        # Image file names double as the label-mask file names (with a .png suffix).
        self.images = os.listdir(self.root + split + '/images/rgb/')

    def __len__(self):
        return len(self.images)

    def __getitem__(self, index):
        # Load the RGB image and reorder it from HWC to CHW.  The original
        # code used .reshape((3, 512, 512)), which reinterprets the flat
        # buffer of a (512, 512, 3) array and scrambles the pixel data;
        # transpose performs the intended axis reordering.  .copy() keeps
        # the array contiguous for torch.from_numpy.
        img = torch.from_numpy(
            np.array(Image.open(self.root + self.split + '/images/rgb/' + self.images[index]).convert('RGB'))
            .transpose(2, 0, 1).copy())
        # The mask is a 0/255 PNG; normalize to {0, 1} integer labels.
        target = torch.from_numpy(
            (np.array(Image.open(self.root + self.split + '/labels/weed_cluster/' + self.images[index][:-4] + '.png')) / 255).astype(int))
        return img, target
class CloudShadowDataset(Dataset):
    """cloud_shadow class only dataset. Use when only detecting cloud shadows to minimize memory usage."""

    def __init__(self, root, split='train'):
        """
        Args:
            root (string): Directory that includes directory of images and directory of labels. Should end in '/'
            split (string): One of 'train', 'test' or 'val'.

        Raises:
            ValueError: If `split` is not one of the three supported splits.
        """
        if split not in ['train', 'test', 'val']:
            raise ValueError('Invalid split for mode! Please use split="train", split="test"'
                             ' or split="val"')
        self.split = split
        self.root = root
        # Image file names double as the label-mask file names (with a .png suffix).
        self.images = os.listdir(self.root + split + '/images/rgb/')

    def __len__(self):
        return len(self.images)

    def __getitem__(self, index):
        # HWC -> CHW via transpose; the original .reshape((3, 512, 512))
        # scrambles pixel data rather than reordering the axes.
        img = torch.from_numpy(
            np.array(Image.open(self.root + self.split + '/images/rgb/' + self.images[index]).convert('RGB'))
            .transpose(2, 0, 1).copy())
        # 0/255 PNG mask normalized to {0, 1} integer labels.
        target = torch.from_numpy(
            (np.array(Image.open(self.root + self.split + '/labels/cloud_shadow/' + self.images[index][:-4] + '.png')) / 255).astype(int))
        return img, target
class DoublePlantDataset(Dataset):
    """double_plant class only dataset. Use when only detecting double plants to minimize memory usage."""

    def __init__(self, root, split='train'):
        """
        Args:
            root (string): Directory that includes directory of images and directory of labels. Should end in '/'
            split (string): One of 'train', 'test' or 'val'.

        Raises:
            ValueError: If `split` is not one of the three supported splits.
        """
        if split not in ['train', 'test', 'val']:
            raise ValueError('Invalid split for mode! Please use split="train", split="test"'
                             ' or split="val"')
        self.split = split
        self.root = root
        # Image file names double as the label-mask file names (with a .png suffix).
        self.images = os.listdir(self.root + split + '/images/rgb/')

    def __len__(self):
        return len(self.images)

    def __getitem__(self, index):
        # HWC -> CHW via transpose; the original .reshape((3, 512, 512))
        # scrambles pixel data rather than reordering the axes.
        img = torch.from_numpy(
            np.array(Image.open(self.root + self.split + '/images/rgb/' + self.images[index]).convert('RGB'))
            .transpose(2, 0, 1).copy())
        # 0/255 PNG mask normalized to {0, 1} integer labels.
        target = torch.from_numpy(
            (np.array(Image.open(self.root + self.split + '/labels/double_plant/' + self.images[index][:-4] + '.png')) / 255).astype(int))
        return img, target
class PlanterSkipDataset(Dataset):
    """planter_skip class only dataset. Use when only detecting planter skips to minimize memory usage."""

    def __init__(self, root, split='train'):
        """
        Args:
            root (string): Directory that includes directory of images and directory of labels. Should end in '/'
            split (string): One of 'train', 'test' or 'val'.

        Raises:
            ValueError: If `split` is not one of the three supported splits.
        """
        if split not in ['train', 'test', 'val']:
            raise ValueError('Invalid split for mode! Please use split="train", split="test"'
                             ' or split="val"')
        self.split = split
        self.root = root
        # Image file names double as the label-mask file names (with a .png suffix).
        self.images = os.listdir(self.root + split + '/images/rgb/')

    def __len__(self):
        return len(self.images)

    def __getitem__(self, index):
        # HWC -> CHW via transpose; the original .reshape((3, 512, 512))
        # scrambles pixel data rather than reordering the axes.
        img = torch.from_numpy(
            np.array(Image.open(self.root + self.split + '/images/rgb/' + self.images[index]).convert('RGB'))
            .transpose(2, 0, 1).copy())
        # 0/255 PNG mask normalized to {0, 1} integer labels.
        target = torch.from_numpy(
            (np.array(Image.open(self.root + self.split + '/labels/planter_skip/' + self.images[index][:-4] + '.png')) / 255).astype(int))
        return img, target
class StandingWaterDataset(Dataset):
    """standing_water class only dataset. Use when only detecting standing water to minimize memory usage."""

    def __init__(self, root, split='train'):
        """
        Args:
            root (string): Directory that includes directory of images and directory of labels. Should end in '/'
            split (string): One of 'train', 'test' or 'val'.

        Raises:
            ValueError: If `split` is not one of the three supported splits.
        """
        if split not in ['train', 'test', 'val']:
            raise ValueError('Invalid split for mode! Please use split="train", split="test"'
                             ' or split="val"')
        self.split = split
        self.root = root
        # Image file names double as the label-mask file names (with a .png suffix).
        self.images = os.listdir(self.root + split + '/images/rgb/')

    def __len__(self):
        return len(self.images)

    def __getitem__(self, index):
        # HWC -> CHW via transpose; the original .reshape((3, 512, 512))
        # scrambles pixel data rather than reordering the axes.
        img = torch.from_numpy(
            np.array(Image.open(self.root + self.split + '/images/rgb/' + self.images[index]).convert('RGB'))
            .transpose(2, 0, 1).copy())
        # 0/255 PNG mask normalized to {0, 1} integer labels.
        target = torch.from_numpy(
            (np.array(Image.open(self.root + self.split + '/labels/standing_water/' + self.images[index][:-4] + '.png')) / 255).astype(int))
        return img, target
class WaterwayDataset(Dataset):
    """waterway class only dataset. Use when only detecting waterways to minimize memory usage."""

    def __init__(self, root, split='train'):
        """
        Args:
            root (string): Directory that includes directory of images and directory of labels. Should end in '/'
            split (string): One of 'train', 'test' or 'val'.

        Raises:
            ValueError: If `split` is not one of the three supported splits.
        """
        if split not in ['train', 'test', 'val']:
            raise ValueError('Invalid split for mode! Please use split="train", split="test"'
                             ' or split="val"')
        self.split = split
        self.root = root
        # Image file names double as the label-mask file names (with a .png suffix).
        self.images = os.listdir(self.root + split + '/images/rgb/')

    def __len__(self):
        return len(self.images)

    def __getitem__(self, index):
        # HWC -> CHW via transpose; the original .reshape((3, 512, 512))
        # scrambles pixel data rather than reordering the axes.
        img = torch.from_numpy(
            np.array(Image.open(self.root + self.split + '/images/rgb/' + self.images[index]).convert('RGB'))
            .transpose(2, 0, 1).copy())
        # 0/255 PNG mask normalized to {0, 1} integer labels.
        target = torch.from_numpy(
            (np.array(Image.open(self.root + self.split + '/labels/waterway/' + self.images[index][:-4] + '.png')) / 255).astype(int))
        return img, target
| 37.84507
| 161
| 0.593227
| 974
| 8,061
| 4.806982
| 0.104723
| 0.05126
| 0.033319
| 0.041008
| 0.890859
| 0.890859
| 0.890859
| 0.844724
| 0.844724
| 0.844724
| 0
| 0.011113
| 0.263243
| 8,061
| 213
| 162
| 37.84507
| 0.777235
| 0.274035
| 0
| 0.791209
| 0
| 0
| 0.156044
| 0.019104
| 0
| 0
| 0
| 0
| 0
| 1
| 0.197802
| false
| 0
| 0.076923
| 0.065934
| 0.472527
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
32a5e501d33fe1c4f73fc9b11de548b4c7db87ea
| 6,468
|
py
|
Python
|
pythonlibs/mantis/sg/fisher/ctp/loader.py
|
adoggie/Tibet.6
|
3c53060edafd80b9c4dafa10699a68d86a410c66
|
[
"MIT"
] | 22
|
2019-10-28T07:28:12.000Z
|
2022-03-19T15:36:41.000Z
|
pythonlibs/mantis/sg/fisher/ctp/loader.py
|
adoggie/Tibet.6
|
3c53060edafd80b9c4dafa10699a68d86a410c66
|
[
"MIT"
] | 1
|
2019-11-07T04:54:14.000Z
|
2019-11-07T07:12:48.000Z
|
pythonlibs/mantis/sg/fisher/ctp/loader.py
|
adoggie/Tibet.6
|
3c53060edafd80b9c4dafa10699a68d86a410c66
|
[
"MIT"
] | 13
|
2019-10-28T07:29:07.000Z
|
2021-11-03T06:53:12.000Z
|
#coding:utf-8
"""
ctp策略运行加载器定义
"""
from pymongo import MongoClient
from mantis.sg.fisher.utils.importutils import import_module
from mantis.sg.fisher.utils.useful import singleton
from mantis.fundamental.utils.timeutils import current_date_string
from mantis.sg.fisher import stbase
from mantis.sg.fisher import ams
from mantis.sg.fisher import strecoder
from mantis.sg.fisher import stsim
from mantis.sg.fisher import stgenerator
from mantis.sg.fisher.model import model
from mantis.sg.fisher.ctp.backtest import CtpMarketBarBackTest,CtpTraderBackTest
from mantis.sg.fisher import stutils
from mantis.sg.fisher.stbase.loader import StrategyLoader
class CtpBackTestLoader(StrategyLoader):
    """Backtest loader: wires up a strategy against historical CTP data."""

    def __init__(self):
        StrategyLoader.__init__(self)
        # Per-loader configuration cache (populated by callers).
        self.cfgs = {}

    def load(self,strategy_id,
             strategy_cls,
             symbol,
             strategy_db=('127.0.0.1',27017),
             dbname='TradeFisherCtp',
             quotes_db=('127.0.0.1',27017),
             cycle=1, start='2019-6-18 9:0', end='2019-6-19 15:0', freq=.1):
        """
        Assemble market, trader and strategy objects for a backtest run.

        :param strategy_id: strategy identifier
        :param strategy_cls: strategy class definition (instantiated below)
        :param symbol: commodity contract symbol
        :param strategy_db: strategy runtime database as (host, port)
        :param dbname: strategy runtime database name
        :param quotes_db: market-quotes database as (host, port)
        :param cycle: K-line (bar) period: 1, 5, 15, 30
        :param start: start time of backtest data to load
        :param end: end time of backtest data to load
        :param freq: bar replay speed control, in seconds (e.g. .1)
        :return: self (for chaining into run())
        """
        SYMBOL = symbol
        mongodb_host, mongodb_port = strategy_db
        data_path = './'+strategy_id
        quotas_db_conn = MongoClient(quotes_db[0], quotes_db[1])  # historical K-line database
        # Initialize the system parameter controller.
        paramctrl = stbase.MongoParamController()
        paramctrl.open(host=mongodb_host, port=mongodb_port, dbname=dbname)
        # Strategy controller.
        stbase.controller.init(data_path)
        # Attach runtime log handling.
        stbase.controller.getLogger().addAppender(stbase.FileLogAppender('CTP'))
        stbase.controller.setParamController(paramctrl)
        param = paramctrl.get(strategy_id)  # read the parameters for the given strategy id
        # conn_url = paramctrl.get_conn_url(param.conn_url)  # read the trading-account info tied to the strategy
        # Initialize the market-data object.
        params = dict(db_conn=quotas_db_conn, cycle= cycle, symbol=SYMBOL,
                      start=start, end=end, freq=freq)
        # params.update( conn_url.dict() )
        market = CtpMarketBarBackTest().init(**params)  # configure the historical quote-record loader
        # Attach the market object to the futures product.
        stbase.controller.futures.setupMarket(market)
        # Initialize the trader object.
        # trader = CtpTrader().init(**conn_url.dict())
        trader = CtpTraderBackTest().init()
        stbase.controller.futures.setupTrader(trader)
        # Initialize the strategy object.
        strategy = strategy_cls(strategy_id, stbase.controller.futures).init().setLoader(self)
        # Set the strategy's log appender (writes to MongoDB).
        strategy.getLogger().addAppender(
            strecoder.StragetyLoggerMongoDBAppender(db_prefix=dbname, host=mongodb_host, port=mongodb_port))
        # Register the strategy with the controller.
        stbase.controller.addStrategy(strategy)
        self.strategy = strategy
        return self

    def getTradeObject(self,bar):
        """Simulate fetching the latest tick data for the bar's contract."""
        tradeobj = self.strategy.product.getTradeObject(bar.code)
        # Backtest ticks are synthesized from the bar's close price.
        tradeobj.price.LastPrice = bar.close
        tradeobj.price.AskPrice1 = bar.close
        tradeobj.price.BidPrice1 = bar.close
        return tradeobj

    def run(self):
        # Run the controller: starts execution and loads the K-line data.
        stbase.controller.run()
class CtpStrategyLoader(StrategyLoader):
    """Strategy loader class.

    NOTE(review): despite the name, this loader is currently identical to
    `CtpBackTestLoader` -- it still builds `CtpMarketBarBackTest` and
    `CtpTraderBackTest`.  Confirm whether live market/trader classes were
    intended here.
    """

    def __init__(self):
        StrategyLoader.__init__(self)
        # Per-loader configuration cache (populated by callers).
        self.cfgs = {}

    def load(self,strategy_id,
             strategy_cls,
             symbol,
             strategy_db=('127.0.0.1',27017),
             dbname='TradeFisherCtp',
             quotes_db=('127.0.0.1',27017),
             cycle=1, start='2019-6-18 9:0', end='2019-6-19 15:0', freq=.1):
        """
        Assemble market, trader and strategy objects for a run.

        :param strategy_id: strategy identifier
        :param strategy_cls: strategy class definition (instantiated below)
        :param symbol: commodity contract symbol
        :param strategy_db: strategy runtime database as (host, port)
        :param dbname: strategy runtime database name
        :param quotes_db: market-quotes database as (host, port)
        :param cycle: K-line (bar) period: 1, 5, 15, 30
        :param start: start time of data to load
        :param end: end time of data to load
        :param freq: bar replay speed control, in seconds (e.g. .1)
        :return: self (for chaining into run())
        """
        SYMBOL = symbol
        mongodb_host, mongodb_port = strategy_db
        data_path = './'+strategy_id
        quotas_db_conn = MongoClient(quotes_db[0], quotes_db[1])  # historical K-line database
        # Initialize the system parameter controller.
        paramctrl = stbase.MongoParamController()
        paramctrl.open(host=mongodb_host, port=mongodb_port, dbname=dbname)
        # Strategy controller.
        stbase.controller.init(data_path)
        # Attach runtime log handling.
        stbase.controller.getLogger().addAppender(stbase.FileLogAppender('CTP'))
        stbase.controller.setParamController(paramctrl)
        param = paramctrl.get(strategy_id)  # read the parameters for the given strategy id
        # conn_url = paramctrl.get_conn_url(param.conn_url)  # read the trading-account info tied to the strategy
        # Initialize the market-data object.
        params = dict(db_conn=quotas_db_conn, cycle= cycle, symbol=SYMBOL,
                      start=start, end=end, freq=freq)
        # params.update( conn_url.dict() )
        market = CtpMarketBarBackTest().init(**params)  # configure the historical quote-record loader
        # Attach the market object to the futures product.
        stbase.controller.futures.setupMarket(market)
        # Initialize the trader object.
        # trader = CtpTrader().init(**conn_url.dict())
        trader = CtpTraderBackTest().init()
        stbase.controller.futures.setupTrader(trader)
        # Initialize the strategy object.
        strategy = strategy_cls(strategy_id, stbase.controller.futures).init().setLoader(self)
        # Set the strategy's log appender (writes to MongoDB).
        strategy.getLogger().addAppender(
            strecoder.StragetyLoggerMongoDBAppender(db_prefix=dbname, host=mongodb_host, port=mongodb_port))
        # Register the strategy with the controller.
        stbase.controller.addStrategy(strategy)
        self.strategy = strategy
        return self

    def getTradeObject(self,bar):
        """Return the trade object for the bar's contract (no tick synthesis here)."""
        tradeobj = self.strategy.product.getTradeObject(bar.code)
        return tradeobj

    def run(self):
        # Run the controller: starts execution and loads the K-line data.
        stbase.controller.run()
"""
mnogodb query statements
----------------------
db.getCollection('AJ_Test1_20190426').find({event:{$in:['order','order_cancel']}},{order_id:1,direction:1,code:1,price:1,oc:1,time:1,quantity:1,_id:0,event:1}).sort({time:-1})
"""
| 33.340206
| 175
| 0.623222
| 696
| 6,468
| 5.656609
| 0.225575
| 0.065024
| 0.033528
| 0.050292
| 0.825502
| 0.777242
| 0.777242
| 0.752858
| 0.752858
| 0.752858
| 0
| 0.026582
| 0.267161
| 6,468
| 194
| 176
| 33.340206
| 0.804008
| 0.202999
| 0
| 0.804348
| 0
| 0
| 0.027491
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.141304
| 0
| 0.293478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
32bba3a32dc25830cb67c2cb96553602845dcb6b
| 139
|
py
|
Python
|
Chapter09/readcount1.py
|
kaushalkumarshah/Learn-Python-in-7-Days
|
2663656767c8959ace836f0c0e272f3e501bbe6e
|
[
"MIT"
] | 12
|
2018-07-09T16:20:31.000Z
|
2022-03-21T22:52:15.000Z
|
Chapter09/readcount1.py
|
kaushalkumarshah/Learn-Python-in-7-Days
|
2663656767c8959ace836f0c0e272f3e501bbe6e
|
[
"MIT"
] | null | null | null |
Chapter09/readcount1.py
|
kaushalkumarshah/Learn-Python-in-7-Days
|
2663656767c8959ace836f0c0e272f3e501bbe6e
|
[
"MIT"
] | 19
|
2018-01-09T12:49:06.000Z
|
2021-11-23T08:05:55.000Z
|
# Read three successive chunks from sample1.txt: the file object keeps its
# position between read() calls, so these print characters 0-19, 20-34 and
# 35-44 respectively.
# `with` guarantees the handle is closed even if a read raises (the
# original left the file open on error); the parenthesized print form is
# valid in both Python 2 and Python 3 for a single argument.
with open("sample1.txt", 'r') as file_input:
    print(file_input.read(20))
    print(file_input.read(15))
    print(file_input.read(10))
| 27.8
| 38
| 0.741007
| 24
| 139
| 4.083333
| 0.5
| 0.459184
| 0.428571
| 0.55102
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056452
| 0.107914
| 139
| 5
| 39
| 27.8
| 0.733871
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.6
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
08ba80a90e4722bf1fa533da5b808e14f9598e28
| 6,683
|
py
|
Python
|
loldib/getratings/models/NA/na_skarner/na_skarner_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_skarner/na_skarner_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_skarner/na_skarner_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Champions covered by this Skarner-support matchup table.  One empty
# Ratings subclass per champion is generated below, replacing 134
# hand-written ``class NA_Skarner_Sup_<Champ>(Ratings): pass`` statements
# while keeping the module's public names identical.
_CHAMPIONS = [
    'Aatrox', 'Ahri', 'Akali', 'Alistar', 'Amumu', 'Anivia', 'Annie',
    'Ashe', 'AurelionSol', 'Azir', 'Bard', 'Blitzcrank', 'Brand', 'Braum',
    'Caitlyn', 'Camille', 'Cassiopeia', 'Chogath', 'Corki', 'Darius',
    'Diana', 'Draven', 'DrMundo', 'Ekko', 'Elise', 'Evelynn', 'Ezreal',
    'Fiddlesticks', 'Fiora', 'Fizz', 'Galio', 'Gangplank', 'Garen',
    'Gnar', 'Gragas', 'Graves', 'Hecarim', 'Heimerdinger', 'Illaoi',
    'Irelia', 'Ivern', 'Janna', 'JarvanIV', 'Jax', 'Jayce', 'Jhin',
    'Jinx', 'Kalista', 'Karma', 'Karthus', 'Kassadin', 'Katarina',
    'Kayle', 'Kayn', 'Kennen', 'Khazix', 'Kindred', 'Kled', 'KogMaw',
    'Leblanc', 'LeeSin', 'Leona', 'Lissandra', 'Lucian', 'Lulu', 'Lux',
    'Malphite', 'Malzahar', 'Maokai', 'MasterYi', 'MissFortune',
    'MonkeyKing', 'Mordekaiser', 'Morgana', 'Nami', 'Nasus', 'Nautilus',
    'Nidalee', 'Nocturne', 'Nunu', 'Olaf', 'Orianna', 'Ornn', 'Pantheon',
    'Poppy', 'Quinn', 'Rakan', 'Rammus', 'RekSai', 'Renekton', 'Rengar',
    'Riven', 'Rumble', 'Ryze', 'Sejuani', 'Shaco', 'Shen', 'Shyvana',
    'Singed', 'Sion', 'Sivir', 'Skarner', 'Sona', 'Soraka', 'Swain',
    'Syndra', 'TahmKench', 'Taliyah', 'Talon', 'Taric', 'Teemo',
    'Thresh', 'Tristana', 'Trundle', 'Tryndamere', 'TwistedFate',
    'Twitch', 'Udyr', 'Urgot', 'Varus', 'Vayne', 'Veigar', 'Velkoz',
    'Vi', 'Viktor', 'Vladimir', 'Volibear', 'Warwick', 'Xayah', 'Xerath',
    'XinZhao', 'Yasuo', 'Yorick', 'Zac', 'Zed', 'Ziggs', 'Zilean', 'Zyra',
]

for _champ in _CHAMPIONS:
    _cls_name = 'NA_Skarner_Sup_' + _champ
    # type() builds a class equivalent to ``class <name>(Ratings): pass``;
    # binding it in globals() keeps the module's public interface unchanged
    # (the classes remain importable by name, including via ``import *``).
    globals()[_cls_name] = type(_cls_name, (Ratings,), {})

# Avoid leaking loop variables as module attributes.
del _champ, _cls_name
| 16.026379
| 46
| 0.77151
| 972
| 6,683
| 4.878601
| 0.151235
| 0.203712
| 0.407423
| 0.494728
| 0.808941
| 0.808941
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166243
| 6,683
| 416
| 47
| 16.064904
| 0.851041
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
3ef160893797f29230b2e4f4010c074ff8ba7656
| 193
|
py
|
Python
|
src/elasticizefiles/base/__init__.py
|
pierluigi-failla/elasticize_files
|
2530d74f1b56344ee73ca113bcb2870566a565a0
|
[
"MIT"
] | null | null | null |
src/elasticizefiles/base/__init__.py
|
pierluigi-failla/elasticize_files
|
2530d74f1b56344ee73ca113bcb2870566a565a0
|
[
"MIT"
] | null | null | null |
src/elasticizefiles/base/__init__.py
|
pierluigi-failla/elasticize_files
|
2530d74f1b56344ee73ca113bcb2870566a565a0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created by Pierluigi on 2020-02-02
project: elasticizefiles
"""
from elasticizefiles.base.extractor import Extractor
from elasticizefiles.base.elastic import Elastic
| 27.571429
| 52
| 0.777202
| 24
| 193
| 6.25
| 0.666667
| 0.253333
| 0.306667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052326
| 0.108808
| 193
| 7
| 53
| 27.571429
| 0.819767
| 0.42487
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
de75f74d20fd5676db62dd833a493c8fb2ebcf4e
| 11,756
|
py
|
Python
|
tests/unit/test_measure.py
|
EwertonBello/measures
|
c1e3dd8b6d6720f331f5e6a65e19f716f3f4d4a5
|
[
"MIT"
] | 19
|
2015-01-14T16:44:49.000Z
|
2020-05-10T04:12:26.000Z
|
tests/unit/test_measure.py
|
EwertonBello/measures
|
c1e3dd8b6d6720f331f5e6a65e19f716f3f4d4a5
|
[
"MIT"
] | 1
|
2020-03-19T18:47:28.000Z
|
2020-03-19T18:47:28.000Z
|
tests/unit/test_measure.py
|
EwertonBello/measures
|
c1e3dd8b6d6720f331f5e6a65e19f716f3f4d4a5
|
[
"MIT"
] | 9
|
2015-01-16T00:42:12.000Z
|
2020-10-02T19:06:57.000Z
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from measures import Measure
from mock import patch
import json
import socket
from nose_focus import focus
class MeasureTestCase(TestCase):
    """Construction-time checks for `Measure`."""

    def test_must_create_a_measure_object_with_correct_attributes(self):
        addr = ('localhost', 1984)
        m = Measure('myclient', addr)
        # The constructor records the client name and wraps the single
        # address into a list.
        self.assertEqual(m.client, 'myclient')
        self.assertEqual(m.addresses, [addr])
        # The underlying socket must be UDP...
        sock_type = m.socket.getsockopt(socket.SOL_SOCKET, socket.SO_TYPE)
        self.assertEqual(sock_type, socket.SOCK_DGRAM)
        # ...and non-blocking (a timeout of exactly zero).
        self.assertEqual(m.socket.gettimeout(), 0.0)
class BaseMeasureTestCase(TestCase):
    """Shared fixture: each test gets a fresh `Measure` as ``self.measure``."""

    def setUp(self):
        target = ('localhost', 1984)
        self.measure = Measure('myclient', target)
class MeasureCountTestCase(BaseMeasureTestCase):
    """Tests for ``Measure.count()``: packet destination, payload shape,
    dimension merging, and resilience to socket errors."""

    @patch('socket.socket.sendto')
    def test_must_send_a_packet_to_correct_address(self, mock_sendto):
        self.measure.count('mymetric')
        self.assertEqual(mock_sendto.call_count, 1)
        # sendto(data, address): the address is the second positional arg.
        self.assertEqual(mock_sendto.call_args[0][1], ('localhost', 1984))

    @patch('socket.socket.sendto')
    def test_must_send_a_packet_with_counter_of_one(self, mock_sendto):
        # Default counter is 1 when not given explicitly.
        self.measure.count('mymetric')
        self.assertEqual(mock_sendto.call_count, 1)
        expected_message = {'client': 'myclient', 'metric': 'mymetric', 'count': 1}
        message = json.loads(mock_sendto.call_args[0][0].decode('utf-8'))
        self.assertDictEqual(message, expected_message)

    @patch('socket.socket.sendto')
    def test_must_send_a_packet_with_metric_counter_of_ten(self, mock_sendto):
        self.measure.count('mymetric', counter=10)
        self.assertEqual(mock_sendto.call_count, 1)
        expected_message = {'client': 'myclient', 'metric': 'mymetric', 'count': 10}
        message = json.loads(mock_sendto.call_args[0][0].decode('utf-8'))
        self.assertDictEqual(message, expected_message)

    @patch('socket.socket.sendto')
    def test_must_send_packet_with_dimensions(self, mock_sendto):
        # Extra dimensions are merged into the outgoing JSON payload.
        self.measure.count('mymetric', dimensions={'name': 'john'})
        self.assertEqual(mock_sendto.call_count, 1)
        expected_message = {
            'client': 'myclient',
            'metric': 'mymetric',
            'count': 1,
            'name': 'john',
        }
        message = json.loads(mock_sendto.call_args[0][0].decode('utf-8'))
        self.assertDictEqual(message, expected_message)

    @patch('socket.socket.sendto')
    def test_dimensions_must_not_override_parameters(self, mock_sendto):
        # Reserved keys (client/metric/count) in `dimensions` must be
        # ignored in favor of the real parameters.
        self.measure.count('mymetric', dimensions={'client': 'otherclient', 'metric': 'othermetric', 'count': 10})
        self.assertEqual(mock_sendto.call_count, 1)
        expected_message = {
            'client': 'myclient',
            'metric': 'mymetric',
            'count': 1,
        }
        message = json.loads(mock_sendto.call_args[0][0].decode('utf-8'))
        self.assertDictEqual(message, expected_message)

    @patch('socket.socket.sendto')
    def test_must_not_change_dimensions_dict(self, mock_sendto):
        # count() must not mutate the caller-supplied dict.
        dimensions = {}
        self.measure.count('mymetric', dimensions=dimensions)
        self.assertFalse(dimensions)

    @patch('socket.socket.sendto', side_effect=socket.error)
    def test_must_not_throw_socket_exception(self, mock_sendto):
        # Socket failures must be swallowed, never propagated to callers.
        try:
            self.measure.count('mymetric')
        except socket.error:
            self.fail('socket.error raised from count')

    # Decorators apply bottom-up, so mock args arrive as (logger.error, sendto).
    @patch('socket.socket.sendto', side_effect=socket.error(80, 'error'))
    @patch('measures.logger.error')
    def test_must_log_socket_error(self, mock_warn, mock_sendto):
        self.measure.count('mymetric')
        mock_warn.assert_called_once_with('Error on sendto. [Errno 80 - error]')
class MeasureTimeTestCase(BaseMeasureTestCase):
    """Tests for the ``Measure.time()`` context manager: timing payload
    shape, dimension handling, and capture of exceptions raised inside
    the timed block."""

    @patch('socket.socket.sendto')
    def test_must_send_a_packet_to_correct_address(self, mock_sendto):
        with self.measure.time('mymetric'):
            pass
        self.assertEqual(mock_sendto.call_count, 1)
        # sendto(data, address): the address is the second positional arg.
        self.assertEqual(mock_sendto.call_args[0][1], ('localhost', 1984))

    @patch('socket.socket.sendto')
    def test_must_send_packet_with_time_spent(self, mock_sendto):
        with self.measure.time('mymetric'):
            pass
        self.assertEqual(mock_sendto.call_count, 1)
        message = json.loads(mock_sendto.call_args[0][0].decode('utf-8'))
        # Exactly five keys: client, metric, time, error_type, error_value.
        self.assertEqual(len(message), 5)
        self.assertIn('client', message)
        self.assertEqual(message['client'], 'myclient')
        self.assertIn('metric', message)
        self.assertEqual(message['metric'], 'mymetric')
        self.assertIn('time', message)
        self.assertIsInstance(message['time'], float)
        self.assertGreater(message['time'], 0)
        # No exception in the block, so both error fields are empty strings.
        self.assertIn('error_type', message)
        self.assertEqual(message['error_type'], '')
        self.assertIn('error_value', message)
        self.assertEqual(message['error_value'], '')

    @patch('socket.socket.sendto')
    def test_must_send_packet_with_dimensions(self, mock_sendto):
        # The context manager yields a dict; keys added to it inside the
        # block are merged into the payload (here: 6 keys instead of 5).
        with self.measure.time('mymetric') as dimensions:
            dimensions['name'] = 'john'
        self.assertEqual(mock_sendto.call_count, 1)
        message = json.loads(mock_sendto.call_args[0][0].decode('utf-8'))
        self.assertEqual(len(message), 6)
        self.assertIn('client', message)
        self.assertEqual(message['client'], 'myclient')
        self.assertIn('metric', message)
        self.assertEqual(message['metric'], 'mymetric')
        self.assertIn('time', message)
        self.assertIsInstance(message['time'], float)
        self.assertGreater(message['time'], 0)
        self.assertIn('error_type', message)
        self.assertEqual(message['error_type'], '')
        self.assertIn('error_value', message)
        self.assertEqual(message['error_value'], '')
        self.assertIn('name', message)
        self.assertEqual(message['name'], 'john')

    @patch('socket.socket.sendto')
    def test_dimensions_must_not_override_parameters(self, mock_sendto):
        # Reserved keys written into the yielded dict must be ignored in
        # favor of the values computed by time() itself.
        with self.measure.time('mymetric') as dimensions:
            dimensions['client'] = 'otherclient'
            dimensions['metric'] = 'othermetric'
            dimensions['time'] = -1
            dimensions['error_type'] = 'othertype'
            dimensions['error_value'] = 'othervalue'
        self.assertEqual(mock_sendto.call_count, 1)
        message = json.loads(mock_sendto.call_args[0][0].decode('utf-8'))
        self.assertEqual(len(message), 5)
        self.assertIn('client', message)
        self.assertEqual(message['client'], 'myclient')
        self.assertIn('metric', message)
        self.assertEqual(message['metric'], 'mymetric')
        self.assertIn('time', message)
        self.assertIsInstance(message['time'], float)
        self.assertGreater(message['time'], 0)
        self.assertIn('error_type', message)
        self.assertEqual(message['error_type'], '')
        self.assertIn('error_value', message)
        self.assertEqual(message['error_value'], '')

    @patch('socket.socket.sendto')
    def test_must_not_change_dimensions_dict(self, mock_sendto):
        # NOTE(review): rebinding the local name `dimensions` inside the
        # `with` block does not touch the dict yielded by time(), so this
        # assertion is vacuously true and the test can never fail —
        # consider asserting on the sent payload instead.
        with self.measure.time('mymetric') as dimensions:
            dimensions = {}
        self.assertFalse(dimensions)

    @patch('socket.socket.sendto', side_effect=socket.error)
    def test_must_not_throw_socket_exception(self, mock_sendto):
        # Socket failures must be swallowed, never propagated to callers.
        try:
            with self.measure.time('mymetric'):
                pass
        except socket.error:
            self.fail('socket.error raised from time')

    # Decorators apply bottom-up, so mock args arrive as (logger.error, sendto).
    @patch('socket.socket.sendto', side_effect=socket.error(80, 'error'))
    @patch('measures.logger.error')
    def test_must_log_socket_error(self, mock_warn, mock_sendto):
        with self.measure.time('mymetric'):
            pass
        mock_warn.assert_called_once_with('Error on sendto. [Errno 80 - error]')

    @patch('socket.socket.sendto')
    def test_must_send_packet_with_time_spent_on_error(self, mock_sendto):
        # An exception inside the block must still produce a packet, must
        # re-raise, and must fill the error fields.
        with self.assertRaises(ValueError):
            with self.measure.time('mymetric'):
                raise ValueError('foo')
        self.assertEqual(mock_sendto.call_count, 1)
        message = json.loads(mock_sendto.call_args[0][0].decode('utf-8'))
        self.assertEqual(len(message), 5)
        self.assertIn('client', message)
        self.assertEqual(message['client'], 'myclient')
        self.assertIn('metric', message)
        self.assertEqual(message['metric'], 'mymetric')
        self.assertIn('time', message)
        self.assertIsInstance(message['time'], float)
        self.assertGreater(message['time'], 0)
        # error_type is recorded as str() of the exception class,
        # i.e. "<class 'ValueError'>".
        self.assertIn('error_type', message)
        self.assertEqual(message['error_type'], str(ValueError))
        self.assertIn('error_value', message)
        self.assertEqual(message['error_value'], 'foo')

    @patch('socket.socket.sendto')
    def test_must_send_packet_with_dimensions_on_error(self, mock_sendto):
        # Dimensions set before the exception must survive into the packet.
        with self.assertRaises(ValueError):
            with self.measure.time('mymetric') as dimensions:
                dimensions['name'] = 'john'
                raise ValueError('foo')
        self.assertEqual(mock_sendto.call_count, 1)
        message = json.loads(mock_sendto.call_args[0][0].decode('utf-8'))
        self.assertIn('name', message)
        self.assertEqual(message['name'], 'john')
class MeasureSendTestCase(BaseMeasureTestCase):
    """Tests for ``Measure.send()``: raw, caller-supplied dimension dicts."""

    def _sent_payload(self, mock_sendto):
        # Decode the JSON datagram that was handed to socket.sendto().
        return json.loads(mock_sendto.call_args[0][0].decode('utf-8'))

    @patch('socket.socket.sendto')
    def test_must_send_a_packet_to_correct_address(self, mock_sendto):
        self.measure.send('mymetric', None)
        self.assertEqual(mock_sendto.call_count, 1)
        # The target address is the second positional argument of sendto().
        self.assertEqual(mock_sendto.call_args[0][1], ('localhost', 1984))

    @patch('socket.socket.sendto')
    def test_must_send_packet_with_dimensions(self, mock_sendto):
        payload = {
            'count': 1,
            'name': 'john'
        }
        self.measure.send('mymetric', payload)
        self.assertEqual(mock_sendto.call_count, 1)
        sent = self._sent_payload(mock_sendto)
        # client + metric + the two caller keys.
        self.assertEqual(len(sent), 4)
        self.assertIn('client', sent)
        self.assertEqual(sent['client'], 'myclient')
        self.assertIn('metric', sent)
        self.assertEqual(sent['metric'], 'mymetric')
        self.assertIn('count', sent)
        self.assertIsInstance(sent['count'], int)
        self.assertEqual(sent['count'], 1)
        self.assertIn('name', sent)
        self.assertEqual(sent['name'], 'john')

    @patch('socket.socket.sendto', side_effect=socket.error(80, 'error'))
    @patch('measures.logger.error')
    def test_must_log_socket_error(self, mock_warn, mock_sendto):
        self.measure.send('mymetric', None)
        mock_warn.assert_called_once_with('Error on sendto. [Errno 80 - error]')

    @patch('socket.socket.sendto')
    def test_dimensions_must_not_override_parameters(self, mock_sendto):
        payload = {
            'client': 'otherclient',
            'metric': 'othermetric',
            'time': 1.1,
            'name': 'john'
        }
        self.measure.send('mymetric', payload)
        self.assertEqual(mock_sendto.call_count, 1)
        sent = self._sent_payload(mock_sendto)
        # client/metric are forced to the instance values; 'time' and
        # 'name' pass through untouched.
        self.assertEqual(len(sent), 4)
        self.assertIn('client', sent)
        self.assertEqual(sent['client'], 'myclient')
        self.assertIn('metric', sent)
        self.assertEqual(sent['metric'], 'mymetric')
        self.assertIn('time', sent)
        self.assertIsInstance(sent['time'], float)
        self.assertEqual(sent['time'], 1.1)
        self.assertIn('name', sent)
        self.assertEqual(sent['name'], 'john')
| 39.716216
| 114
| 0.652943
| 1,351
| 11,756
| 5.492228
| 0.090303
| 0.107143
| 0.05283
| 0.093801
| 0.879919
| 0.869003
| 0.852561
| 0.843127
| 0.831132
| 0.812129
| 0
| 0.012673
| 0.207979
| 11,756
| 295
| 115
| 39.850847
| 0.784234
| 0.003743
| 0
| 0.77686
| 0
| 0
| 0.1574
| 0.00538
| 0
| 0
| 0
| 0
| 0.429752
| 1
| 0.095041
| false
| 0.016529
| 0.024793
| 0
| 0.140496
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
de97e6f8cd37358489e4c1c49082589e8b7bba39
| 3,741
|
py
|
Python
|
tests/atlas/test_atlas_webhook_tf.py
|
byorgey/irc-hooky
|
e78942b7e13ce273c40815863d0384dddfa52243
|
[
"MIT"
] | 19
|
2016-01-26T18:36:38.000Z
|
2022-03-12T02:32:01.000Z
|
tests/atlas/test_atlas_webhook_tf.py
|
byorgey/irc-hooky
|
e78942b7e13ce273c40815863d0384dddfa52243
|
[
"MIT"
] | 3
|
2016-01-29T19:43:25.000Z
|
2019-03-11T20:21:11.000Z
|
tests/atlas/test_atlas_webhook_tf.py
|
byorgey/irc-hooky
|
e78942b7e13ce273c40815863d0384dddfa52243
|
[
"MIT"
] | 2
|
2016-03-01T09:23:07.000Z
|
2020-04-01T21:53:51.000Z
|
import unittest
from irc_hooky.atlas.atlas_webhook import AtlasWebhook
class TestAtlasWebhookTerraform(unittest.TestCase):
    """Tests for AtlasWebhook handling of Terraform alert payloads.

    The original tests repeated the same payload/event/process scaffolding
    in every method; it is factored into two private helpers so each test
    states only what is unique to it.
    """

    @staticmethod
    def _terraform_payload(status):
        # One canonical terraform_alert payload, varying only in status.
        return {
            "terraform_alert": {
                "environment": "user/tf-test",
                "message": "Queued manually in Atlas",
                "number": 2,
                "status": status,
                "url": "https://url.com"
            }
        }

    def _process(self, payload):
        # Wrap the payload in an event, verify the IRC message starts
        # empty, process the event, and return the webhook for asserting.
        atl = AtlasWebhook({"payload": payload}, {})
        self.assertEqual(atl.irc_message, "")
        atl.process_event()
        return atl

    def test_empty_payload(self):
        atl = self._process({})
        self.assertEqual(atl.irc_message, "")

    def test_process_non_atl_event(self):
        atl = self._process({"foo": "bar"})
        self.assertEqual(atl.irc_message, "")

    def test_process_invalid_atl_event(self):
        # Same alert body but under an unrecognized key: must be ignored.
        payload = self._terraform_payload("errored")
        payload["fake_alert"] = payload.pop("terraform_alert")
        atl = self._process(payload)
        self.assertEqual(atl.irc_message, "")

    def test_unknown_tf_event(self):
        atl = self._process(self._terraform_payload("fakestatus"))
        self.assertEqual(atl.irc_message, "")

    def test_tf_needs_confirmation_event(self):
        atl = self._process(self._terraform_payload("planned"))
        self.assertEqual(
            atl.irc_message,
            "Terraform plan needs confirmation. https://url.com")

    def test_tf_applied_event(self):
        atl = self._process(self._terraform_payload("applied"))
        self.assertEqual(
            atl.irc_message,
            "Terraform plan was applied successfully! https://url.com")

    def test_tf_errored_event(self):
        atl = self._process(self._terraform_payload("errored"))
        self.assertEqual(
            atl.irc_message,
            "An error occurred during the Terraform plan or apply phase. https://url.com")  # NOQA
| 30.917355
| 108
| 0.504678
| 337
| 3,741
| 5.433234
| 0.181009
| 0.083561
| 0.13763
| 0.160568
| 0.803386
| 0.803386
| 0.803386
| 0.803386
| 0.803386
| 0.803386
| 0
| 0.002124
| 0.370756
| 3,741
| 120
| 109
| 31.175
| 0.775701
| 0.002406
| 0
| 0.666667
| 0
| 0
| 0.204881
| 0
| 0
| 0
| 0
| 0
| 0.126126
| 1
| 0.063063
| false
| 0
| 0.018018
| 0
| 0.09009
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7220fe5f26e2bd74ae2a474a7cea8655a935ed5d
| 7,041
|
py
|
Python
|
OPF_Python/src/sdp_ymat.py
|
JipKim/ACOPF
|
170c8a65844075773a4230aad93cf24b3832d1c3
|
[
"MIT"
] | 2
|
2022-02-16T19:37:42.000Z
|
2022-03-30T11:09:10.000Z
|
OPF_Python/src/sdp_ymat.py
|
JipKim/ACOPF
|
170c8a65844075773a4230aad93cf24b3832d1c3
|
[
"MIT"
] | null | null | null |
OPF_Python/src/sdp_ymat.py
|
JipKim/ACOPF
|
170c8a65844075773a4230aad93cf24b3832d1c3
|
[
"MIT"
] | null | null | null |
#%%
import numpy as np
from scipy import sparse
from scipy.linalg import block_diag
#%%
def sdp_ymat( lines, Ybus ):
    """Build the matrix-valued operators used by the SDP relaxation of AC-OPF.

    :param lines: sequence of line objects; each must expose ``r``, ``x``,
        ``b``, ``tap``, ``shft``, ``fbus`` and ``tbus`` attributes
        (0-based bus indices — they are used directly as array indices).
    :param Ybus: (nbus, nbus) complex bus admittance matrix (numpy array).
    :returns: tuple of callables
        ``(Yk, Yk_, Mk, Ylineft, Ylinetf, Y_lineft, Y_linetf, YL, YL_)``.
        Bus operators take a bus index ``k`` and return a real
        (2*nbus, 2*nbus) matrix; line operators take a line index ``l``
        and return a real (2*nbus, 2*nbus) sparse/dense matrix acting on
        the stacked [Re(V); Im(V)] voltage vector.
    """
    nbus = Ybus.shape[0]
    nline = len(lines)
    # busset = np.arange(0, nbus)
    # lineset = np.arange(0, nline)
    #%%
    # e(k): k-th standard basis row vector.
    def e(k): return np.eye(nbus)[:, k][np.newaxis] # size of e(k): (1, nbus)
    # Yk_small(k): row k of Ybus embedded in an otherwise-zero matrix.
    def Yk_small(k): return (e(k).T @ e(k)) @ Ybus
    # Yk(k): real-valued operator for active power injection at bus k.
    def Yk(k): return (1/2) * \
        np.block([
            [np.real(Yk_small(k) + Yk_small(k).T), np.imag(Yk_small(k).T - Yk_small(k))],
            [np.imag(Yk_small(k) - Yk_small(k).T), np.real(Yk_small(k) + Yk_small(k).T)]
        ])
    # Yk_(k): real-valued operator for reactive power injection at bus k.
    def Yk_(k): return -(1/2) * \
        np.block([
            [np.imag(Yk_small(k) + Yk_small(k).T), np.real(Yk_small(k) - Yk_small(k).T)],
            [np.real(Yk_small(k).T - Yk_small(k)), np.imag(Yk_small(k) + Yk_small(k).T)]
        ])
    # Mk(k): selector for the squared voltage magnitude at bus k.
    def Mk(k): return block_diag(e(k).T @ e(k), e(k).T @ e(k))
    # Real part of line admittance
    def gl(l): return np.real(1 / (lines[l].r+1j*lines[l].x))
    # Imaginary part of line admittance
    def bl(l): return np.imag(1 / (lines[l].r+1j*lines[l].x))
    # tau(l): tap ratio; a stored tap of 0 means "no transformer" -> 1.
    def tau(l): return 1 if lines[l].tap == 0 else lines[l].tap
    # theta(l): phase-shift angle of line l.
    def theta(l): return lines[l].shft
    # Phase-shift-adjusted conductance/susceptance terms, from->to and to->from.
    def gbcosft(l): return gl(l)*np.cos(theta(l)) + bl(l)*np.cos(theta(l)+np.pi/2)
    def gbsinft(l): return gl(l)*np.sin(theta(l)) + bl(l)*np.sin(theta(l)+np.pi/2)
    def gbcostf(l): return gl(l)*np.cos(-theta(l)) + bl(l)*np.cos(-theta(l)+np.pi/2)
    def gbsintf(l): return gl(l)*np.sin(-theta(l)) + bl(l)*np.sin(-theta(l)+np.pi/2)
    #%%
    # Each operator below is symmetrized as 0.5 * (A + A.T).
    # Ylineft(l): active power flow on line l measured at the "from" end.
    def Ylineft(l): return 0.5*(
        sparse.coo_matrix((
            [gl(l)/(tau(l)**2), -gbcosft(l)/tau(l), gbsinft(l)/tau(l),
             gl(l)/(tau(l)**2), -gbsinft(l)/tau(l), -gbcosft(l)/tau(l)],
            ([lines[l].fbus, lines[l].fbus, lines[l].fbus, lines[l].fbus +
              nbus, lines[l].fbus+nbus, lines[l].fbus+nbus],
             [lines[l].fbus, lines[l].tbus, lines[l].tbus+nbus,
              lines[l].fbus+nbus, lines[l].tbus, lines[l].tbus+nbus])
        ), shape = (2*nbus, 2*nbus))
        +
        sparse.coo_matrix((
            [gl(l)/(tau(l)**2), -gbcosft(l)/tau(l), gbsinft(l)/tau(l),
             gl(l)/(tau(l)**2), -gbsinft(l)/tau(l), -gbcosft(l)/tau(l)],
            ([lines[l].fbus, lines[l].fbus, lines[l].fbus, lines[l].fbus +
              nbus, lines[l].fbus+nbus, lines[l].fbus+nbus],
             [lines[l].fbus, lines[l].tbus, lines[l].tbus+nbus,
              lines[l].fbus+nbus, lines[l].tbus, lines[l].tbus+nbus])
        ), shape=(2*nbus, 2*nbus)).T
    )
    # Y_lineft(l): reactive power flow at the "from" end (includes line
    # charging susceptance b/2).
    def Y_lineft(l): return 0.5*(
        sparse.coo_matrix((
            [-(bl(l)+lines[l].b/2)/(tau(l)**2), gbsinft(l)/tau(l), gbcosft(l)/tau(l), -
             (bl(l)+lines[l].b/2)/(tau(l)**2), -gbcosft(l)/tau(l), gbsinft(l)/tau(l)],
            ([lines[l].fbus, lines[l].fbus, lines[l].fbus, lines[l].fbus +
              nbus, lines[l].fbus+nbus, lines[l].fbus+nbus],
             [lines[l].fbus, lines[l].tbus, lines[l].tbus+nbus,
              lines[l].fbus+nbus, lines[l].tbus, lines[l].tbus+nbus])
        ), shape=(2*nbus, 2*nbus))
        +
        sparse.coo_matrix((
            [-(bl(l)+lines[l].b/2)/(tau(l)**2), gbsinft(l)/tau(l), gbcosft(l)/tau(l), -
             (bl(l)+lines[l].b/2)/(tau(l)**2), -gbcosft(l)/tau(l), gbsinft(l)/tau(l)],
            ([lines[l].fbus, lines[l].fbus, lines[l].fbus, lines[l].fbus +
              nbus, lines[l].fbus+nbus, lines[l].fbus+nbus],
             [lines[l].fbus, lines[l].tbus, lines[l].tbus+nbus,
              lines[l].fbus+nbus, lines[l].tbus, lines[l].tbus+nbus])
        ), shape=(2*nbus, 2*nbus)).T
    )
    # Ylinetf(l): active power flow at the "to" end of line l.
    def Ylinetf(l): return 0.5*(
        sparse.coo_matrix((
            [-gbcostf(l)/tau(l), -gbsintf(l)/tau(l), gbsintf(l) /
             tau(l), -gbcostf(l)/tau(l), gl(l), gl(l)],
            ([lines[l].fbus, lines[l].fbus, lines[l].fbus+nbus,
              lines[l].fbus+nbus, lines[l].tbus, lines[l].tbus+nbus],
             [lines[l].tbus, lines[l].tbus+nbus, lines[l].tbus,
              lines[l].tbus+nbus, lines[l].tbus, lines[l].tbus+nbus])
        ), shape = (2*nbus, 2*nbus))
        +
        sparse.coo_matrix((
            [-gbcostf(l)/tau(l), -gbsintf(l)/tau(l), gbsintf(l) /
             tau(l), -gbcostf(l)/tau(l), gl(l), gl(l)],
            ([lines[l].fbus, lines[l].fbus, lines[l].fbus+nbus,
              lines[l].fbus+nbus, lines[l].tbus, lines[l].tbus+nbus],
             [lines[l].tbus, lines[l].tbus+nbus, lines[l].tbus,
              lines[l].tbus+nbus, lines[l].tbus, lines[l].tbus+nbus])
        ), shape = (2*nbus, 2*nbus)).T
    )
    # Y_linetf(l): reactive power flow at the "to" end (includes b/2).
    def Y_linetf(l): return 0.5*(
        sparse.coo_matrix((
            [gbsintf(l)/tau(l), -gbcostf(l)/tau(l), gbcostf(l)/tau(l),
             gbsintf(l)/tau(l), -(bl(l)+lines[l].b/2), -(bl(l)+lines[l].b/2)],
            ([lines[l].fbus, lines[l].fbus, lines[l].fbus+nbus,
              lines[l].fbus+nbus, lines[l].tbus, lines[l].tbus+nbus],
             [lines[l].tbus, lines[l].tbus+nbus, lines[l].tbus,
              lines[l].tbus+nbus, lines[l].tbus, lines[l].tbus+nbus])
        ), shape=(2*nbus, 2*nbus))
        +
        sparse.coo_matrix((
            [gbsintf(l)/tau(l), -gbcostf(l)/tau(l), gbcostf(l)/tau(l),
             gbsintf(l)/tau(l), -(bl(l)+lines[l].b/2), -(bl(l)+lines[l].b/2)],
            ([lines[l].fbus, lines[l].fbus, lines[l].fbus+nbus,
              lines[l].fbus+nbus, lines[l].tbus, lines[l].tbus+nbus],
             [lines[l].tbus, lines[l].tbus+nbus, lines[l].tbus,
              lines[l].tbus+nbus, lines[l].tbus, lines[l].tbus+nbus])
        ), shape=(2*nbus, 2*nbus)).T
    )
    # YL(l): active power loss operator for line l (scaled by r*|y|^2).
    def YL(l): return sparse.coo_matrix((
        [1, -1, 1, -1, -1, 1, -1, 1],
        ([lines[l].fbus, lines[l].fbus, lines[l].fbus+nbus, lines[l].fbus+nbus,
          lines[l].tbus, lines[l].tbus, lines[l].tbus+nbus, lines[l].tbus+nbus],
         [lines[l].fbus, lines[l].tbus, lines[l].fbus+nbus, lines[l].tbus+nbus,
          lines[l].fbus, lines[l].tbus, lines[l].fbus+nbus, lines[l].tbus+nbus])
    ), shape = (2*nbus, 2*nbus)) * lines[l].r * (gl(l)**2 + bl(l)**2)
    # YL_(l): reactive power loss operator, net of line charging (b/2 term).
    def YL_(l): return (sparse.coo_matrix((
        [1, -1, 1, -1, -1, 1, -1, 1],
        ([lines[l].fbus, lines[l].fbus, lines[l].fbus+nbus, lines[l].fbus+nbus,
          lines[l].tbus, lines[l].tbus, lines[l].tbus+nbus, lines[l].tbus+nbus],
         [lines[l].fbus, lines[l].tbus, lines[l].fbus+nbus, lines[l].tbus+nbus,
          lines[l].fbus, lines[l].tbus, lines[l].fbus+nbus, lines[l].tbus+nbus])
    ), shape = (2*nbus, 2*nbus)) * lines[l].x * (gl(l)**2 + bl(l)**2)
    -
    sparse.coo_matrix((
        [1, 1, 1, 1],
        ([lines[l].fbus, lines[l].fbus+nbus, lines[l].tbus, lines[l].tbus+nbus],
         [lines[l].fbus, lines[l].fbus+nbus, lines[l].tbus, lines[l].tbus+nbus])
    ), shape = (2*nbus, 2*nbus)) * lines[l].b / 2)
    return Yk, Yk_, Mk, Ylineft, Ylinetf, Y_lineft, Y_linetf, YL, YL_
| 45.134615
| 89
| 0.512427
| 1,174
| 7,041
| 3.040034
| 0.062181
| 0.258896
| 0.19053
| 0.142897
| 0.875035
| 0.856823
| 0.85234
| 0.83805
| 0.827683
| 0.815915
| 0
| 0.016885
| 0.251385
| 7,041
| 155
| 90
| 45.425806
| 0.660216
| 0.021872
| 0
| 0.637097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16129
| false
| 0
| 0.024194
| 0.153226
| 0.193548
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
19c610c8de96f02886afd683ab431bfd08f9796c
| 11,383
|
py
|
Python
|
robin_stocks/export.py
|
VacuusUmbra/robin_stocks
|
52ee37483a8d859a6de70b5248068ac5a36806be
|
[
"MIT"
] | null | null | null |
robin_stocks/export.py
|
VacuusUmbra/robin_stocks
|
52ee37483a8d859a6de70b5248068ac5a36806be
|
[
"MIT"
] | null | null | null |
robin_stocks/export.py
|
VacuusUmbra/robin_stocks
|
52ee37483a8d859a6de70b5248068ac5a36806be
|
[
"MIT"
] | null | null | null |
from csv import writer
from datetime import date, timedelta, datetime
import sys
from pathlib import Path
import robin_stocks.helper as helper
import robin_stocks.orders as orders
import robin_stocks.stocks as stocks
def fix_file_extension(file_name):
    """Force a file name to carry a ``.csv`` suffix.

    :param file_name: Name of the file.
    :type file_name: str
    :returns: Absolute ``Path`` with the suffix added or replaced by ``.csv``.
    """
    # with_suffix() both replaces an existing extension and appends one
    # when the name has none; resolve() anchors it to the current directory.
    return Path(file_name).with_suffix('.csv').resolve()
def create_absolute_csv(dir_path, file_name, order_type):
    """Create an absolute csv file path inside a directory.

    :param dir_path: Absolute or relative path to the directory the file will be written.
    :type dir_path: str
    :param file_name: An optional argument for the name of the file. If not defined, filename will be {order_type}_orders_{current date}
    :type file_name: str
    :param order_type: Will be 'stock', 'option', or 'crypto'; used only for the default file name.
    :type order_type: str
    :returns: An absolute file path (``pathlib.Path``).
    """
    directory = Path(dir_path).resolve()
    if not file_name:
        file_name = "{}_orders_{}.csv".format(order_type, date.today().strftime('%b-%d-%Y'))
    else:
        # Bug fix: the old code resolved the name to an absolute path first,
        # which made the subsequent join silently discard `dir_path`.
        # Keep only the bare name with a .csv suffix so the file always
        # lands inside `directory`.
        file_name = Path(file_name).with_suffix('.csv').name
    return directory / file_name
@helper.login_required
def export_completed_stock_orders(dir_path, file_name=None):
    """Write all completed stock orders to a csv file.

    :param dir_path: Absolute or relative path to the directory the file will be written.
    :type dir_path: str
    :param file_name: An optional argument for the name of the file. If not defined, filename will be stock_orders_{current date}
    :type file_name: Optional[str]
    """
    file_path = create_absolute_csv(dir_path, file_name, 'stock')
    all_orders = orders.get_all_stock_orders()
    # The `with` block closes the file; the old explicit f.close() inside
    # it was redundant and has been removed.
    with open(file_path, 'w', newline='') as f:
        csv_writer = writer(f)
        csv_writer.writerow([
            'symbol',
            'date',
            'order_type',
            'side',
            'fees',
            'quantity',
            'average_price'
        ])
        for order in all_orders:
            # "Completed" means the order filled and was never cancelled.
            if order['state'] == 'filled' and order['cancel'] is None:
                csv_writer.writerow([
                    stocks.get_symbol_by_url(order['instrument']),
                    order['last_transaction_at'],
                    order['type'],
                    order['side'],
                    order['fees'],
                    order['quantity'],
                    order['average_price']
                ])
@helper.login_required
def export_all_stock_orders(dir_path, file_name=None):
    """Write all stock orders (regardless of state) to a csv file.

    The old docstring claimed "completed" orders; unlike
    export_completed_stock_orders, this writes every order returned.

    :param dir_path: Absolute or relative path to the directory the file will be written.
    :type dir_path: str
    :param file_name: An optional argument for the name of the file. If not defined, filename will be stock_orders_{current date}
    :type file_name: Optional[str]
    """
    file_path = create_absolute_csv(dir_path, file_name, 'stock')
    all_orders = orders.get_all_stock_orders()
    # The `with` block closes the file; the old explicit f.close() inside
    # it was redundant and has been removed.
    with open(file_path, 'w', newline='') as f:
        csv_writer = writer(f)
        csv_writer.writerow([
            'symbol',
            'date',
            'order_type',
            'side',
            'fees',
            'quantity',
            'average_price'
        ])
        for order in all_orders:
            csv_writer.writerow([
                stocks.get_symbol_by_url(order['instrument']),
                order['last_transaction_at'],
                order['type'],
                order['side'],
                order['fees'],
                order['quantity'],
                order['average_price']
            ])
@helper.login_required
def export_completed_option_orders(dir_path, file_name=None, page_limit=sys.maxsize):
    """Write all completed option orders to a csv.

    :param dir_path: Absolute or relative path to the directory the file will be written.
    :type dir_path: str
    :param file_name: An optional argument for the name of the file. If not defined, filename will be option_orders_{current date}
    :type file_name: Optional[str]
    :param page_limit: Maximum number of result pages to fetch from the orders API.
    :type page_limit: int
    """
    file_path = create_absolute_csv(dir_path, file_name, 'option')
    all_orders = orders.get_all_option_orders(page_limit)
    # The `with` block closes the file; the old explicit f.close() inside
    # it was redundant and has been removed.
    with open(file_path, 'w', newline='') as f:
        csv_writer = writer(f)
        csv_writer.writerow([
            'chain_symbol',
            'expiration_date',
            'strike_price',
            'option_type',
            'side',
            'order_created_at',
            'direction',
            'order_quantity',
            'order_type',
            'opening_strategy',
            'closing_strategy',
            'processed_premium',
            'processed_quantity'
        ])
        for order in all_orders:
            if order['state'] == 'filled':
                # An option order can have multiple legs; one csv row each.
                for leg in order['legs']:
                    instrument_data = helper.request_get(leg['option'])
                    csv_writer.writerow([
                        order['chain_symbol'],
                        instrument_data['expiration_date'],
                        instrument_data['strike_price'],
                        instrument_data['type'],
                        leg['side'],
                        order['created_at'],
                        order['direction'],
                        order['quantity'],
                        order['type'],
                        order['opening_strategy'],
                        order['closing_strategy'],
                        order['processed_premium'],
                        order['processed_quantity']
                    ])
@helper.login_required
def export_all_option_orders(dir_path, file_name=None, page_limit=sys.maxsize):
    """Write all option orders (regardless of state) to a csv.

    :param dir_path: Absolute or relative path to the directory the file will be written.
    :type dir_path: str
    :param file_name: An optional argument for the name of the file. If not defined, filename will be option_orders_{current date}
    :type file_name: Optional[str]
    :param page_limit: Maximum number of result pages to fetch from the orders API.
    :type page_limit: int
    """
    file_path = create_absolute_csv(dir_path, file_name, 'option')
    all_orders = orders.get_all_option_orders(page_limit)
    # The `with` block closes the file; the old explicit f.close() inside
    # it was redundant and has been removed.
    with open(file_path, 'w', newline='') as f:
        csv_writer = writer(f)
        csv_writer.writerow([
            'chain_symbol',
            'expiration_date',
            'strike_price',
            'option_type',
            'side',
            'order_created_at',
            'direction',
            'order_quantity',
            'order_type',
            'opening_strategy',
            'closing_strategy',
            'processed_premium',
            'processed_quantity'
        ])
        for order in all_orders:
            # An option order can have multiple legs; one csv row each.
            for leg in order['legs']:
                instrument_data = helper.request_get(leg['option'])
                csv_writer.writerow([
                    order['chain_symbol'],
                    instrument_data['expiration_date'],
                    instrument_data['strike_price'],
                    instrument_data['type'],
                    leg['side'],
                    order['created_at'],
                    order['direction'],
                    order['quantity'],
                    order['type'],
                    order['opening_strategy'],
                    order['closing_strategy'],
                    order['processed_premium'],
                    order['processed_quantity']
                ])
@helper.login_required
def export_option_orders_date_range(dir_path, start_date, end_date, file_name=None, page_limit=sys.maxsize):
    """Write all of the option orders within a date range to a csv.

    :param dir_path: Absolute or relative path to the directory the file will be written.
    :type dir_path: str
    :param start_date: The start of the date range in format 2020-12-21
    :type start_date: str
    :param end_date: The end of the date range in format 2020-12-21
    :type end_date: str
    :param file_name: An optional argument for the name of the file. If not defined, filename will be option_orders_{current date}
    :type file_name: Optional[str]
    :param page_limit: Maximum number of result pages to fetch from the orders API.
    :type page_limit: int
    """
    file_path = create_absolute_csv(dir_path, file_name, 'option')
    all_orders = orders.get_all_option_orders(page_limit)
    # The `with` block closes the file; the old explicit f.close() inside
    # it was redundant and has been removed.
    with open(file_path, 'w', newline='') as f:
        csv_writer = writer(f)
        csv_writer.writerow([
            'chain_symbol',
            'expiration_date',
            'strike_price',
            'option_type',
            'side',
            'order_created_at',
            'direction',
            'order_quantity',
            'order_type',
            'opening_strategy',
            'closing_strategy',
            'processed_premium',
            'processed_quantity'
        ])
        # The range bounds are loop-invariant: parse them once instead of
        # on every order as the old code did.
        start = datetime.strptime(str(start_date), "%Y-%m-%d")
        end = datetime.strptime(str(end_date), "%Y-%m-%d")
        for order in all_orders:
            orderDate = datetime.strptime(order['created_at'][:10], "%Y-%m-%d")
            if start <= orderDate <= end:
                # An option order can have multiple legs; one csv row each.
                for leg in order['legs']:
                    instrument_data = helper.request_get(leg['option'])
                    csv_writer.writerow([
                        order['chain_symbol'],
                        instrument_data['expiration_date'],
                        instrument_data['strike_price'],
                        instrument_data['type'],
                        leg['side'],
                        order['created_at'],
                        order['direction'],
                        order['quantity'],
                        order['type'],
                        order['opening_strategy'],
                        order['closing_strategy'],
                        order['processed_premium'],
                        order['processed_quantity']
                    ])
@helper.login_required
def export_todays_option_orders(dir_path, file_name=None):
    """Write today's option orders to a csv.

    :param dir_path: Absolute or relative path to the directory the file will be written.
    :type dir_path: str
    :param file_name: An optional argument for the name of the file. If not defined, filename will be option_orders_{current date}
    :type file_name: Optional[str]
    """
    today = datetime.today().strftime("%Y-%m-%d")
    # A single day's orders fit well within 10 pages of results.
    export_option_orders_date_range(dir_path, today, today, file_name, page_limit=10)
@helper.login_required
def get_option_orders_date_range(start_date, end_date, page_limit=sys.maxsize):
    """Get all of the option orders within a date range as a list.

    :param start_date: The start of the date range in format 2020-12-21
    :type start_date: str
    :param end_date: The end of the date range in format 2020-12-21
    :type end_date: str
    :param page_limit: Maximum number of result pages to fetch from the orders API.
    :type page_limit: int
    :returns: List of order dicts whose created_at date falls inside the range (inclusive).
    """
    order_list = []
    all_orders = orders.get_all_option_orders(page_limit)
    # The range bounds are loop-invariant: parse them once instead of on
    # every order as the old code did.
    start = datetime.strptime(str(start_date), "%Y-%m-%d")
    end = datetime.strptime(str(end_date), "%Y-%m-%d")
    for order in all_orders:
        orderDate = datetime.strptime(order['created_at'][:10], "%Y-%m-%d")
        if start <= orderDate <= end:
            order_list.append(order)
    return order_list
@helper.login_required
def get_todays_option_orders():
    """Return a list of every option order placed today."""
    current_day = datetime.today().strftime("%Y-%m-%d")
    # A small page limit is enough for a single day of orders.
    return get_option_orders_date_range(current_day, current_day, 10)
| 38.070234
| 134
| 0.570676
| 1,325
| 11,383
| 4.664906
| 0.101887
| 0.047889
| 0.023297
| 0.026695
| 0.872027
| 0.836272
| 0.82883
| 0.799223
| 0.798253
| 0.76816
| 0
| 0.005234
| 0.328648
| 11,383
| 299
| 135
| 38.070234
| 0.803585
| 0.2256
| 0
| 0.842593
| 0
| 0
| 0.161309
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046296
| false
| 0
| 0.032407
| 0
| 0.092593
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
19d97308669a669fd8da85a0fdeb5138bd4966b8
| 11,078
|
py
|
Python
|
code/train.py
|
ArvindSubramaniam/Pruning-Networks-using-Neuron2Neuron-Skip-Connections
|
bbe402bbf4c5afb4ae712354e8fca5ce320501b8
|
[
"Apache-2.0"
] | 1
|
2021-11-16T03:36:51.000Z
|
2021-11-16T03:36:51.000Z
|
code/train.py
|
ArvindSubramaniam/Pruning-Networks-using-Neuron2Neuron-Skip-Connections
|
bbe402bbf4c5afb4ae712354e8fca5ce320501b8
|
[
"Apache-2.0"
] | null | null | null |
code/train.py
|
ArvindSubramaniam/Pruning-Networks-using-Neuron2Neuron-Skip-Connections
|
bbe402bbf4c5afb4ae712354e8fca5ce320501b8
|
[
"Apache-2.0"
] | 3
|
2020-12-29T01:52:01.000Z
|
2021-11-16T03:36:52.000Z
|
import time
import torch
import torch.optim as optim
import torch.nn as nn
from torch.autograd import Variable, Function
import torch.nn.functional as F
from torch.utils.data import Dataset, DataLoader, ConcatDataset, random_split
import torchvision
from torchvision import datasets
from torchvision import transforms
import matplotlib.pyplot as plt
import numpy as np
###Downloading the dataset - MNIST
# Convert images to tensors and normalize the single MNIST channel to ~[-1, 1].
transform = transforms.Compose(
    [transforms.ToTensor(),
     transforms.Normalize((0.5,), (0.5,))])
# Training split (downloaded to ./data on first run).
trainset_MNIST = torchvision.datasets.MNIST(root='./data', train=True,
                                            download=True, transform=transform)
train_loader_MNIST = torch.utils.data.DataLoader(trainset_MNIST, batch_size=50,
                                                 shuffle=True, num_workers=2)
# Test split, used as the validation set here.
valset_MNIST = torchvision.datasets.MNIST(root='./data', train=False,
                                          download=True, transform=transform)
val_loader_MNIST = torch.utils.data.DataLoader(valset_MNIST, batch_size=50,
                                               shuffle=False, num_workers=2)
###Downloading the dataset - CIFAR 10
# NOTE: `transform` is rebound — CIFAR-10 is 3-channel, so per-channel
# means/stds of 0.5 are used for normalization.
transform = transforms.Compose(
    [transforms.ToTensor(),
     transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
trainset = torchvision.datasets.CIFAR10(root='./data', train=True,
                                        download=True, transform=transform)
train_loader = torch.utils.data.DataLoader(trainset, batch_size=50,
                                           shuffle=True, num_workers=2)
valset = torchvision.datasets.CIFAR10(root='./data', train=False,
                                      download=True, transform=transform)
val_loader = torch.utils.data.DataLoader(valset, batch_size=50,
                                         shuffle=False, num_workers=2)
# Sanity check: sample counts and batch counts (batch_size=50).
print(len(trainset),len(valset))
print(len(train_loader),len(val_loader))
def train1_conv(model1):
    """Train `model1` for 50 epochs on `train_loader` with flattened,
    double-precision inputs, NLL loss and SGD.

    :param model1: the network to train (moved to the GPU in-place).
    :returns: list of the average training loss for each epoch.
    """
    loss_fn = nn.NLLLoss()
    model1 = model1.cuda()
    sgd = optim.SGD(model1.parameters(), lr=0.003, momentum=0.9)
    started = time.time()
    epoch_losses = []
    epochs = 50
    for epoch_idx in range(epochs):
        epoch_total = 0
        for batch, batch_labels in train_loader:
            # Flatten each image into a single feature vector.
            batch = batch.view(batch.shape[0], -1)
            sgd.zero_grad()
            batch, batch_labels = batch.cuda(), batch_labels.cuda()
            predictions = model1(batch.double())
            batch_loss = loss_fn(predictions.double(), batch_labels)
            batch_loss.backward()
            sgd.step()
            epoch_total += batch_loss.item()
        # (The original used a `for ... else`; without a `break` that is
        # identical to running this once after the batch loop.)
        print(f"Epoch {epoch_idx+1}/{epochs} - Training loss: {epoch_total/len(train_loader)}")
        epoch_losses.append(epoch_total/len(train_loader))
    print("\nTraining Time (in minutes) =",(time.time()-started)/60)
    return epoch_losses
def train_conv(model1):
    """Train convolutional `model1` for 10 epochs on `train_loader`
    (images keep their original shape), NLL loss and SGD.

    :param model1: the network to train (moved to the GPU in-place).
    :returns: list of the average training loss for each epoch.
    """
    loss_fn = nn.NLLLoss()
    model1 = model1.cuda()
    sgd = optim.SGD(model1.parameters(), lr=0.003, momentum=0.9)
    started = time.time()
    epoch_losses = []
    epochs = 10
    for epoch_idx in range(epochs):
        epoch_total = 0
        for batch, batch_labels in train_loader:
            sgd.zero_grad()
            batch, batch_labels = batch.cuda(), batch_labels.cuda()
            predictions = model1(batch)
            batch_loss = loss_fn(predictions, batch_labels)
            # Backpropagate, then update the weights.
            batch_loss.backward()
            sgd.step()
            epoch_total += batch_loss.item()
        epoch_losses.append(epoch_total/len(train_loader))
        print(f"Epoch {epoch_idx+1}/{epochs} - Training loss: {epoch_total/len(train_loader)}")
    print("\nTraining Time (in minutes) =",(time.time()-started)/60)
    return epoch_losses
def train1(model1):
    """Train flattened-input, double-precision `model1` with early stopping.

    Runs up to 50 epochs of SGD + NLLLoss over `train_loader`, computing the
    validation loss on `val_loader` after each epoch.  Stops early once the
    validation loss drifts more than ~0.03 above the best value seen so far.

    :param model1: network to train (moved to the GPU in-place).
    :returns: tuple (training_loss, validation_loss) of per-epoch averages.
    """
    minimum = 1000
    criterion = nn.NLLLoss()
    model1 = model1.cuda()
    optimizer = optim.SGD(model1.parameters(), lr=0.003, momentum=0.9)
    time0 = time.time()
    training_loss, validation_loss = [], []
    epochs = 50
    for epoch in range(epochs):
        # BUGFIX: restore training mode each epoch.  The original switched to
        # model1.eval() for validation and never switched back, so dropout/
        # batch-norm ran in eval mode for every epoch after the first.
        model1.train()
        running_loss = 0
        for data, labels in train_loader:
            # Flatten images into vectors.
            data = data.view(data.shape[0], -1)
            optimizer.zero_grad()
            data, labels = data.cuda(), labels.cuda()
            output = model1(data.double())
            loss = criterion(output.double(), labels)
            loss.backward()   # backpropagate
            optimizer.step()  # update weights
            running_loss += loss.item()
        training_loss.append(running_loss/len(train_loader))
        ###Validation Loss
        correct = 0
        total = 0
        val_loss = 0
        model1.eval()
        with torch.no_grad():
            # (Removed a no-op optimizer.zero_grad() here: no gradients are
            # produced under no_grad, and training zeroes them anyway.)
            for data, targets in val_loader:
                data = data.view(data.shape[0], -1)
                data, targets = data.cuda(), targets.cuda()
                outputs = model1(data.double())
                test_loss = criterion(outputs.double(), targets)
                _, predicted = torch.max(outputs.data, 1)
                total += targets.size(0)
                correct += (predicted == targets).sum().item()
                val_loss += test_loss.item()
        validation_loss.append(val_loss/len(val_loader))
        if minimum > (val_loss/len(val_loader)):
            minimum = val_loss/len(val_loader)
        print(f"Epoch {epoch+1}/{epochs} - Training loss: {running_loss/len(train_loader)} Validation loss: {val_loss/len(val_loader)}")
        # Early stopping: break once validation loss is no longer within
        # 0.03 of the best (minimum) validation loss seen so far.
        if not np.allclose(val_loss/len(val_loader), minimum, atol=0.03):
            break
    print("\nTraining Time (in minutes) =",(time.time()-time0)/60)
    return training_loss, validation_loss
def train(model1):
    """Train flattened-input `model1` with early stopping on validation loss.

    Runs up to 50 epochs of SGD + NLLLoss over `train_loader`, computing the
    validation loss on `val_loader` after each epoch.  Stops early once the
    validation loss drifts more than ~0.03 above the best value seen so far.

    :param model1: network to train (moved to the GPU in-place).
    :returns: tuple (training_loss, validation_loss) of per-epoch averages.
    """
    minimum = 1000
    criterion = nn.NLLLoss()
    model1 = model1.cuda()
    optimizer = optim.SGD(model1.parameters(), lr=0.003, momentum=0.9)
    time0 = time.time()
    training_loss = []
    validation_loss = []
    epochs = 50
    for epoch in range(epochs):
        # BUGFIX: restore training mode each epoch.  The original switched to
        # model1.eval() for validation and never switched back, so dropout/
        # batch-norm ran in eval mode for every epoch after the first.
        model1.train()
        running_loss = 0
        for data, labels in train_loader:
            # Flatten images into vectors.
            data = data.view(data.shape[0], -1)
            optimizer.zero_grad()
            data, labels = data.cuda(), labels.cuda()
            output = model1(data)
            loss = criterion(output, labels)
            loss.backward()   # backpropagate
            optimizer.step()  # update weights
            running_loss += loss.item()
        training_loss.append(running_loss/len(train_loader))
        ###Validation Loss
        correct = 0
        total = 0
        val_loss = 0
        model1.eval()
        with torch.no_grad():
            # (Removed a no-op optimizer.zero_grad() here: no gradients are
            # produced under no_grad, and training zeroes them anyway.)
            for data, targets in val_loader:
                data = data.view(data.shape[0], -1)
                data, targets = data.cuda(), targets.cuda()
                outputs = model1(data)
                test_loss = criterion(outputs, targets)
                _, predicted = torch.max(outputs.data, 1)
                total += targets.size(0)
                correct += (predicted == targets).sum().item()
                val_loss += test_loss.item()
        validation_loss.append(val_loss/len(val_loader))
        if minimum > (val_loss/len(val_loader)):
            minimum = val_loss/len(val_loader)
        print(f"Epoch {epoch+1}/{epochs} - Training loss: {running_loss/len(train_loader)} Validation loss: {val_loss/len(val_loader)}")
        # Early stopping: break once validation loss is no longer within
        # 0.03 of the best (minimum) validation loss seen so far.
        if not np.allclose(val_loss/len(val_loader), minimum, atol=0.03):
            break
    print("\nTraining Time (in minutes) =",(time.time()-time0)/60)
    return training_loss, validation_loss
def validation(model1):
    """Per-image top-1 accuracy of `model1` on `val_loader` (each image is
    flattened to 1 x 3072); prints and returns the accuracy in percent."""
    correct_count, all_count = 0, 0
    model1.eval()
    for images, labels in val_loader:
        for idx in range(len(labels)):
            flat = images[idx].view(1, 3072)
            with torch.no_grad():
                flat, labels = flat.cuda(), labels.cuda()
                log_probs = model1(flat)
            # Convert log-probabilities to probabilities and pick the argmax.
            probs = torch.exp(log_probs)
            scores = list(probs.cpu().numpy()[0])
            predicted = scores.index(max(scores))
            actual = labels.cpu().numpy()[idx]
            correct_count += int(actual == predicted)
            all_count += 1
    print("Number Of Images Tested =", all_count)
    print("\nModel Accuracy =", (100*correct_count/all_count))
    return 100*correct_count/all_count
def validation1(model1):
    """Per-image top-1 accuracy of double-precision `model1` on `val_loader`
    (each image flattened to 1 x 3072); prints and returns accuracy in percent."""
    correct_count, all_count = 0, 0
    model1.eval()
    for images, labels in val_loader:
        for idx in range(len(labels)):
            flat = images[idx].view(1, 3072)
            with torch.no_grad():
                flat, labels = flat.cuda(), labels.cuda()
                log_probs = model1(flat.double())
            # Convert log-probabilities to probabilities and pick the argmax.
            probs = torch.exp(log_probs.double())
            scores = list(probs.double().cpu().numpy()[0])
            predicted = scores.index(max(scores))
            actual = labels.cpu().numpy()[idx]
            correct_count += int(actual == predicted)
            all_count += 1
    print("Number Of Images Tested =", all_count)
    print("\nModel Accuracy =", (100*correct_count/all_count))
    return 100*correct_count/all_count
# NOTE: dead code — an earlier per-image implementation of validation_conv,
# kept as a module-level string literal (never executed).  The active,
# batch-wise validation_conv is defined below.
'''
def validation_conv(model1):
    correct_count, all_count = 0, 0
    model1.eval()
    for images,labels in val_loader:
        for i in range(len(labels)):
            img = images[i]
            with torch.no_grad():
                img,labels = img.cuda(),labels.cuda()
                logps = model1(img,sparsity)
            ps = torch.exp(logps)
            probab = list(ps.cpu().numpy()[0])
            pred_label = probab.index(max(probab))
            true_label = labels.cpu().numpy()[i]
            if(true_label == pred_label):
                correct_count += 1
            all_count += 1
    print("Number Of Images Tested =", all_count)
    print("\nModel Accuracy =", (100*correct_count/all_count))
    return 100*correct_count/all_count
'''
def validation_conv(model):
    """Batch-wise top-1 accuracy of convolutional `model` on `val_loader`;
    prints and returns the accuracy in percent."""
    correct = 0
    total = 0
    model = model.cuda()
    with torch.no_grad():
        for batch in val_loader:
            images, labels = batch
            images, labels = images.cuda(), labels.cuda()
            scores = model(images)
            # Class prediction = index of the maximum score per row.
            _, predictions = torch.max(scores.data, 1)
            total += labels.size(0)
            correct += (predictions == labels).sum().item()
    print("Accuracy: ", 100*correct/total)
    return 100*correct/total
def plots(training_loss, validation_loss):
    """Plot the training and validation loss curves on shared axes."""
    figure, axes = plt.subplots()
    axes.plot(training_loss, label='Training_loss')
    axes.plot(validation_loss, label='Validation_loss')
    plt.legend()
    plt.show()
| 33.877676
| 136
| 0.589727
| 1,322
| 11,078
| 4.816944
| 0.121785
| 0.026853
| 0.020729
| 0.020415
| 0.853015
| 0.836997
| 0.811244
| 0.796796
| 0.786118
| 0.771043
| 0
| 0.027884
| 0.291027
| 11,078
| 326
| 137
| 33.981595
| 0.782913
| 0.048836
| 0
| 0.746781
| 0
| 0.008584
| 0.066599
| 0.018386
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034335
| false
| 0
| 0.051502
| 0
| 0.11588
| 0.064378
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c22e9e49f0e1066e6fea478a9b82e9baa73ec67e
| 2,087
|
py
|
Python
|
evalml/tests/demo_tests/test_datasets.py
|
BlockchainClimateInstitute/price_microservice
|
11d1cff8965fe1befc997e9da3dc09efceed4579
|
[
"BSD-3-Clause"
] | null | null | null |
evalml/tests/demo_tests/test_datasets.py
|
BlockchainClimateInstitute/price_microservice
|
11d1cff8965fe1befc997e9da3dc09efceed4579
|
[
"BSD-3-Clause"
] | 13
|
2021-03-04T19:29:09.000Z
|
2022-03-07T01:00:43.000Z
|
evalml/tests/demo_tests/test_datasets.py
|
RG4421/evalml
|
33c62abe6d107d1da2f54e9e44a90f18aaf916a9
|
[
"BSD-3-Clause"
] | null | null | null |
import pandas as pd
import woodwork as ww
from evalml import demos
def test_fraud():
    """load_fraud: full set, subsampled set, and pandas output on request."""
    X, y = demos.load_fraud()
    assert (X.shape, y.shape) == ((99992, 12), (99992,))
    assert isinstance(X, ww.DataTable)
    assert isinstance(y, ww.DataColumn)
    X, y = demos.load_fraud(1000)
    assert (X.shape, y.shape) == ((1000, 12), (1000,))
    X, y = demos.load_fraud(1000, return_pandas=True)
    assert (X.shape, y.shape) == ((1000, 12), (1000,))
    assert isinstance(X, pd.DataFrame)
    assert isinstance(y, pd.Series)
def test_wine():
    """load_wine returns woodwork structures by default, pandas on request."""
    X, y = demos.load_wine()
    assert (X.shape, y.shape) == ((178, 13), (178,))
    assert isinstance(X, ww.DataTable)
    assert isinstance(y, ww.DataColumn)
    X, y = demos.load_wine(return_pandas=True)
    assert (X.shape, y.shape) == ((178, 13), (178,))
    assert isinstance(X, pd.DataFrame)
    assert isinstance(y, pd.Series)
def test_breast_cancer():
    """load_breast_cancer returns woodwork structures by default, pandas on request."""
    X, y = demos.load_breast_cancer()
    assert (X.shape, y.shape) == ((569, 30), (569,))
    assert isinstance(X, ww.DataTable)
    assert isinstance(y, ww.DataColumn)
    X, y = demos.load_breast_cancer(return_pandas=True)
    assert (X.shape, y.shape) == ((569, 30), (569,))
    assert isinstance(X, pd.DataFrame)
    assert isinstance(y, pd.Series)
def test_diabetes():
    """load_diabetes returns woodwork structures by default, pandas on request."""
    X, y = demos.load_diabetes()
    assert (X.shape, y.shape) == ((442, 10), (442,))
    assert isinstance(X, ww.DataTable)
    assert isinstance(y, ww.DataColumn)
    X, y = demos.load_diabetes(return_pandas=True)
    assert (X.shape, y.shape) == ((442, 10), (442,))
    assert isinstance(X, pd.DataFrame)
    assert isinstance(y, pd.Series)
def test_churn():
    """load_churn returns woodwork structures by default, pandas on request."""
    X, y = demos.load_churn()
    assert (X.shape, y.shape) == ((7043, 19), (7043,))
    assert isinstance(X, ww.DataTable)
    assert isinstance(y, ww.DataColumn)
    X, y = demos.load_churn(return_pandas=True)
    assert (X.shape, y.shape) == ((7043, 19), (7043,))
    assert isinstance(X, pd.DataFrame)
    assert isinstance(y, pd.Series)
| 26.417722
| 55
| 0.643987
| 301
| 2,087
| 4.385382
| 0.13289
| 0.242424
| 0.058333
| 0.091667
| 0.9
| 0.837121
| 0.72803
| 0.72803
| 0.72803
| 0.676515
| 0
| 0.066176
| 0.218016
| 2,087
| 78
| 56
| 26.75641
| 0.742647
| 0
| 0
| 0.655738
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.688525
| 1
| 0.081967
| true
| 0
| 0.04918
| 0
| 0.131148
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5f2b9e46fce34da567e2e296d466fae65d6f654c
| 37,875
|
py
|
Python
|
VirClass/tests/test_load.py
|
thecoparyew/Virus-classification-theano
|
55c4a7b804fa65d14c2167a3bbbaa2cf1b4a3521
|
[
"MIT"
] | null | null | null |
VirClass/tests/test_load.py
|
thecoparyew/Virus-classification-theano
|
55c4a7b804fa65d14c2167a3bbbaa2cf1b4a3521
|
[
"MIT"
] | 5
|
2016-12-08T17:51:59.000Z
|
2017-02-23T11:18:32.000Z
|
VirClass/tests/test_load.py
|
thecoparyew/Virus-classification-theano
|
55c4a7b804fa65d14c2167a3bbbaa2cf1b4a3521
|
[
"MIT"
] | null | null | null |
# pylint: disable=missing-docstring, protected-access, unused-argument, too-many-arguments, too-many-statements
# pylint: disable=too-many-locals, bad-continuation
# pydocstyle: disable=missing-docstring
from collections import defaultdict
from io import StringIO
from unittest import TestCase, main
from unittest import mock
from unittest.mock import patch, mock_open, MagicMock, file_spec
import numpy as np
import VirClass.VirClass.load as load
class LoadUnitTests(TestCase):
def test_one_hot(self):
# tests: 1x list, 1x np.array, n < number_of_classes, n = number_of_classes, n > number_of_classes
x = [0, 1, 3, 2, 0]
x_1 = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0], [1, 0, 0, 0]])
x_2 = np.array([[1, 0, 0, 0, 0], [0, 1, 0, 0, 0], [0, 0, 0, 1, 0], [0, 0, 1, 0, 0],
[1, 0, 0, 0, 0]])
number_of_classes = max(x) + 1
self.assertRaisesRegex(AssertionError, "Cannot create numpy array; number of classes must be bigger than max "
"number of list.", load.one_hot, x, number_of_classes - 1)
np.testing.assert_array_equal(load.one_hot(x, number_of_classes), x_1)
np.testing.assert_array_equal(load.one_hot(x, number_of_classes + 1), x_2)
np.testing.assert_array_equal(load.one_hot(np.array(x), number_of_classes), x_1)
def test_seq_to_bits(self):
vec = "ATCGYM"
test_atcgym = [1, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0,
0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 1]
test_atcg = [1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1,
1, 1, 1, 1,
1, 1, 1, 1]
dict_1 = {"A": [1, 1, 0], "G": [1, 0, 0], "T": [1, 1, 1]}
test_dict_1 = [1, 1, 0,
1, 1, 1,
1, 1, 1,
1, 0, 0,
1, 1, 1,
1, 1, 1]
dict_2 = {"T": [1, 0], "C": [0, 1]}
test_dict_2 = [1, 1,
1, 0,
0, 1,
1, 1,
1, 1,
1, 1]
self.assertRaisesRegex(AssertionError, "Number of unique nucleotides and transmission dictionary not present.",
load.seq_to_bits, vec, None, None)
res = load.seq_to_bits(vec, "ATCGYM", None)
self.assertEqual(res, test_atcgym)
self.assertEqual(len(res) % 6, 0) # we have 6 unique nucleotides - len % 6 must be 0
res = load.seq_to_bits(vec, "ATCG", None)
self.assertEqual(res, test_atcg)
self.assertEqual(len(res) % 4, 0)
res = load.seq_to_bits(vec, None, dict_1)
self.assertEqual(res, test_dict_1)
self.assertEqual(len(res) % 3, 0)
res = load.seq_to_bits(vec, "AT", dict_1)
self.assertEqual(res, test_dict_1)
self.assertEqual(len(res) % 3, 0)
res = load.seq_to_bits(vec, None, dict_2)
self.assertEqual(res, test_dict_2)
self.assertEqual(len(res) % 2, 0)
res = load.seq_to_bits(vec, "CTGM", dict_2)
self.assertEqual(res, test_dict_2)
self.assertEqual(len(res) % 2, 0)
    @patch('VirClass.VirClass.load.os.path.isfile')
    @patch('VirClass.VirClass.load.load_seqs_from_ncbi')
    def test_load_from_file_fasta(self, arg1, arg2):
        """load_from_file_fasta: cache hit reads the gzipped FASTA; cache miss
        fetches from NCBI and writes the cache.

        Patch decorators apply bottom-up: ``arg1`` is the mock for
        ``load_seqs_from_ncbi`` and ``arg2`` the mock for ``os.path.isfile``.
        """
        # Cache hit: pretend the FASTA file already exists on disk.
        load.os.path.isfile.return_value = True
        temp = defaultdict(list)
        temp['1004345262'] = \
            'TGTTGCGTTAACAACAAACCAACCTCCGACCCAAAACAAAGATGAAAATAAAAGATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAGAAA' \
            'GATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAGACGAGGGACCCTCTGACCGACCAACTCACCTACCCAAACTCCCAGGAACC'
        # Expected (sequences, taxonomy) pair returned by the loader.
        res_tuple = (
            temp,
            {
                '1004345262':
                    'Viruses;ssRNA viruses;ssRNA negative-strand viruses;Mononegavirales;Bornaviridae;Bornavirus'
            }
        )
        # Raw FASTA text: header line plus the sequence wrapped over two lines.
        read_data = \
            '>1004345262 Viruses;ssRNA viruses;ssRNA negative-strand viruses;Mononegavirales;Bornaviridae;Bornavirus' \
            '\nTGTTGCGTTAACAACAAACCAACCTCCGACCCAAAACAAAGATGAAAATAAAAGATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAG\n' \
            'AAAGATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAGACGAGGGACCCTCTGACCGACCAACTCACCTACCCAAACTCCCAGGAACC\n'
        # https://www.biostars.org/p/190067/
        # gzip.open is used as a context manager, so a MagicMock with a file
        # spec whose __enter__ yields a StringIO stands in for the file.
        with patch('VirClass.VirClass.load.gzip.open') as mocked_open:
            handle = MagicMock(spec=file_spec)
            handle.__enter__.return_value = StringIO(read_data)
            mocked_open.return_value = handle
            res = load.load_from_file_fasta('bla.bla')
            mocked_open.assert_called_once_with('bla.bla', 'rt')
            self.assertEqual(res, res_tuple)
        # Cache miss: the file is absent, so the NCBI fetch supplies the data
        # and the loader writes ('wt') the cache file.
        load.os.path.isfile.return_value = False
        load.load_seqs_from_ncbi.return_value = res_tuple
        with patch('VirClass.VirClass.load.gzip.open', mock_open(), create=True) as mocked_open:
            res = load.load_from_file_fasta('bla.bla')
            mocked_open.assert_called_once_with('bla.bla', 'wt')
            self.assertEqual(res, res_tuple)
    # @patch('VirClass.VirClass.load.seq_to_bits')
    def test_dataset_from_id(self):
        """dataset_from_id: slice stored sequences into fixed-size samples,
        encode each nucleotide via the 4-bit translation dict, and pair every
        sample with its numeric class.

        Covers full sampling (1.0), empty input, partial sampling (0.2), a
        smaller sample size (20), and the two assertion errors (sampling rate
        out of range; no encoding supplied).
        """
        # data
        # One-hot style translation: each nucleotide becomes 4 bits.
        dict_1 = {"A": [1, 0, 0, 0], "T": [0, 1, 0, 0], "C": [0, 0, 1, 0], "G": [0, 0, 0, 1]}
        temp_data = defaultdict(list)
        temp_data['1004345262'] = \
            'TGTTGCGTTAACAACAAACCAACCTCCGACCCAAAACAAAGATGAAAATAAAAGATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAGAAA' \
            'GATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAGACGAGGGACCCTCTGACCGACCAACTCACCTACCCAAACTCCCAGGAACC'
        temp_data['10043452'] = \
            'GATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAGACGAGGGACCCTCTGACCGACCAACTCACCTACCCAAACTCCCAGGAACC' \
            'TGTTGCGTTAACAACAAACCAACCTCCGACCCAAAACAAAGATGAAAATAAAAGATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGA'
        # Numeric class per sequence id.
        temp_tax = {'10043452': 0, '1004345262': 1}
        ids = ['1004345262', '10043452']
        # test1: sample size 100, full sampling (1.0) — three encoded samples.
        expected_x = [
            [0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0,
             1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0,
             0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0,
             0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
             1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1,
             1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0,
             0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0,
             1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0,
             0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0,
             0, 0, 0, 1],
            [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0,
             1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1,
             0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0,
             0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0,
             0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0,
             0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0,
             0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0,
             1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
             0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0,
             0, 0, 1, 0],
            [0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1,
             1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0,
             0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
             1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0,
             1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1,
             1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
             0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0,
             0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1,
             0, 1, 0, 0]]
        expected_y = [1, 1, 0]
        res = load.dataset_from_id(temp_data, temp_tax, ids, 100, 1.0, dict_1)
        self.assertTrue(res, (expected_x, expected_y))
        # test2: empty input yields empty dataset.
        res = load.dataset_from_id(defaultdict(list), {}, [], 100, 0.5, dict_1)
        self.assertTrue(res, ([], []))
        # test3: partial sampling (0.2) keeps a subset of the samples.
        expected_x2 = [
            [0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0,
             1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0,
             0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0,
             0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
             1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1,
             1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0,
             0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0,
             1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0,
             0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0,
             0, 0, 0, 1],
            [0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1,
             1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0,
             0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
             1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0,
             1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1,
             1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
             0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0,
             0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1,
             0, 1, 0, 0]]
        expected_y2 = [1, 0]
        res = load.dataset_from_id(temp_data, temp_tax, ids, 100, 0.2, dict_1)
        self.assertTrue(res, (expected_x2, expected_y2))
        # test4: smaller sample size (20) produces more, shorter samples.
        expected_x20 = [
            [0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0,
             1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0,
             0, 0, 1, 0, 0, 0, 1, 0],
            [0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0,
             1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0,
             0, 0, 1, 0, 1, 0, 0, 0],
            [0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
             1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0,
             1, 0, 0, 0, 0, 0, 0, 1],
            [0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1,
             1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1,
             1, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1,
             1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0,
             0, 0, 1, 0, 0, 0, 1, 0],
            [0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1,
             1, 0, 0, 0, 0, 0, 1, 0],
            [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0,
             0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             0, 1, 0, 0, 0, 0, 1, 0],
            [0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0,
             0, 0, 0, 1, 0, 1, 0, 0],
            [0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0,
             1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1,
             1, 0, 0, 0, 0, 1, 0, 0],
            [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0,
             0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0,
             1, 0, 0, 0, 0, 0, 0, 1]]
        expected_y20 = [1, 1, 1, 1, 1, 0, 0, 0, 0, 0]
        res = load.dataset_from_id(temp_data, temp_tax, ids, 20, 0.5, dict_1)
        self.assertTrue(res, (expected_x20, expected_y20))
        # Invalid arguments: sampling rate outside (0, 1], and no encoding.
        self.assertRaisesRegex(AssertionError, "Sampling size is in wrong range - it must be between 0.0 and 1.0.",
                               load.dataset_from_id, temp_data, temp_tax, ids, 20, 20, dict_1)
        self.assertRaisesRegex(AssertionError, "Both transmission dictionary and unique nucleotides cannot be empty.",
                               load.dataset_from_id, temp_data, temp_tax, ids, 20, 0.5, None)
@patch('VirClass.VirClass.load.pickle.load')
def test_load_dataset(self, mock_pickle_load):
m_file = mock_open()
with patch('VirClass.VirClass.load.gzip.open', m_file):
load.load_dataset('bla.bla')
self.assertEqual(mock_pickle_load.call_count, 1)
self.assertTrue(m_file.called)
m_file.assert_called_once_with('bla.bla', 'rt')
@patch('VirClass.VirClass.load.pickle.dump')
def test_save_dataset(self, mock_pickle_dump):
m_file = mock_open()
with patch('VirClass.VirClass.load.gzip.open', m_file):
load.save_dataset('bla.bla', {'test_key': 'test_val'})
mock_pickle_dump.assert_called_once_with({'test_key': 'test_val'}, mock.ANY)
self.assertTrue(m_file.called)
m_file.assert_called_once_with('bla.bla', 'wt')
def test_build_dataset_ids(self):
oids = ['1006610892', '1021076629', '1023464444', '1028356461', '1028356384', '1006160387', '10086561',
'1016776533', '1005739119', '10140926', '10313991', '1007626122', '1021076583', '10257473',
'1021076642', '1004345262', '1002160105', '1023176908', '1007626112', '1024325226']
res = load.build_dataset_ids(oids=oids, test=0.2, seed=0)
self.assertTrue(isinstance(res, dict))
self.assertEqual(len(res.keys()), 4)
self.assertCountEqual(res.keys(), ['tr_ids', 'te_ids', 'trtr_ids', 'trte_ids'])
self.assertEqual(len(res['tr_ids'] + res['te_ids']), len(oids))
self.assertCountEqual(res['tr_ids'] + res['te_ids'], oids)
self.assertTrue(set(res['tr_ids']).isdisjoint(res['te_ids']))
self.assertEqual(len(res['trtr_ids'] + res['trte_ids']), len(res['tr_ids']))
self.assertCountEqual(res['trtr_ids'] + res['trte_ids'], res['tr_ids'])
self.assertTrue(set(res['trtr_ids']).isdisjoint(res['trte_ids']))
self.assertRaisesRegex(ValueError, "test_size=1.000000 should be smaller than 1.0 or be an integer",
load.build_dataset_ids, oids, 1.0, 0)
datasets2 = {'tr_ids': [], 'te_ids': [], 'trte_ids': [], 'trtr_ids': []}
res = load.build_dataset_ids([], test=0.2, seed=0)
self.assertTrue(isinstance(res, dict))
self.assertDictEqual(res, datasets2)
res = load.build_dataset_ids(oids, test=0.0, seed=0)
self.assertTrue(isinstance(res, dict))
self.assertEqual(len(res.keys()), 4)
self.assertCountEqual(res.keys(), ['tr_ids', 'te_ids', 'trtr_ids', 'trte_ids'])
self.assertEqual(len(res['tr_ids'] + res['te_ids']), len(oids))
self.assertCountEqual(res['tr_ids'] + res['te_ids'], oids)
self.assertTrue(set(res['tr_ids']).isdisjoint(res['te_ids']))
self.assertEqual(len(res['te_ids']), 0)
self.assertEqual(len(res['tr_ids']), len(oids))
self.assertEqual(len(res['trtr_ids'] + res['trte_ids']), len(res['tr_ids']))
self.assertCountEqual(res['trtr_ids'] + res['trte_ids'], res['tr_ids'])
self.assertTrue(set(res['trtr_ids']).isdisjoint(res['trte_ids']))
self.assertRaisesRegex(ValueError, "test_size=1.000000 should be smaller than 1.0 or be an integer",
load.build_dataset_ids, oids, 1.0, 0)
# Exercises load.classes_to_numerical: maps taxonomy-label strings to numeric
# class ids and (judging by the fixture shapes) per-class mean sequence lengths.
def test_classes_to_numerical(self):
# Toy oid -> nucleotide-sequence mapping used as input data.
temp = defaultdict(list)
temp['1004345262'] = \
'TGTTGCGTTAACAACAAACCAACCTCCGACCCAAAACAAAGATGAAAATAAAAGATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAGAAA' \
'GATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAGACGAGGGACCCTCTGACCGACCAACTCACCTACCCAAACTCCCAGGAACC'
temp['10043452'] = \
'GATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAGACGAGGGACCCTCTGACCGACCAACTCACCTACCCAAACTCCCAGGAACC' \
'TGTTGCGTTAACAACAAACCAACCTCCGACCCAAAACAAAGATGAAAATAAAAGATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGA'
temp['1023464444'] = \
'AAACACAACAGGGCCTCAAGCCTGTCGCAAAAAGAACAGGTAACAACGACAGGAACGTGGCGGACGAGATACAGACCGGCACGTAAACCCAACCGACAC' \
'ATCCAATATGGTACCCCTCATTGAACCACATAACACAACACAGGCCGCAACTCCGAATACGCATGACAATCACCAAGAATGGGCAAGCTCAATCGCAGCACTCATG'
temp['1028356461'] = \
'CCAATCCCGACCGGAATGGAGGTCCTGACAGGGTACTAAACCCAGTGTAGCGCCCACACGCAATCAGAACAAGACAAAAGCCCCCTAAACCCCACTCCGAAAA' \
'GCGGACAAAAATCCAACCTCATACAAACAAACAAGGGCTAGATGCCAACAGGGACTGCCATCCAATGAGAATGTCCATAGGAGTCGAAACAAAGCCA'
temp['1028356384'] = \
'GAAGCCACCAGAAAGATAAGTGAAACAGTACACGAGCCCTAAACACAACGAATCTTCATAATAACCACCCGACTAAGCGACAAAACCACAGGAACCGACCC' \
'AGACGAAAGCACCGACCAGTGATCACAACTCTTTCGAGGTCACACCCGGTACTACGTAAGTGCCACCATCGCAGCTAAGAGGGCACGCA'
# oid -> semicolon-separated taxonomy label. Note '1028356461' and '1028356384'
# share an identical label, so they should map to the SAME class id.
labels = {'1004345262':
'Viruses;ssRNA viruses;ssRNA negative-strand viruses;Mononegavirales;Bornaviridae;Bornavirus',
'10043452': 'Viruses;ssRNA viruses;ssRNA positive-strand viruses;ViralesA;ViridaeB;VirusC',
'1023464444': 'Viruses;ssDNA viruses;ssDNA negative-strand viruses;ViralesA;ViridaeB;VirusC',
'1028356461': 'Viruses;ssDNA viruses;ssDNA negative-strand viruses;ViridaeB;VirusC',
'1028356384': 'Viruses;ssDNA viruses;ssDNA negative-strand viruses;ViridaeB;VirusC'}
# Expected per-class values (presumably mean sequence lengths — TODO confirm
# against load.classes_to_numerical).
res_temp = defaultdict(int)
res_temp[0] = 195.0
res_temp[1] = 200.0
res_temp[2] = 205.0
# NOTE(review): the next two statements RE-assign res_temp[2] (keys 3 and 4 were
# presumably intended), so the fixture ends up as {0: 195.0, 1: 200.0, 2: 195.0}.
res_temp[2] = 205.0
res_temp[2] = 195.0
# NOTE(review): this expected mapping gives five DISTINCT class ids, yet two oids
# above share one label — the fixture looks inconsistent; re-derive before use.
res_expect = ({'10043452': 0, '1004345262': 1, '1023464444': 2, '1028356461': 3, '1028356384': 4}, res_temp)
res = load.classes_to_numerical(temp, labels)
# NOTE(review): assertTrue(res, res_expect) only checks that `res` is truthy and
# uses res_expect as the failure MESSAGE — nothing is compared. assertEqual was
# almost certainly intended, but the fixtures above must be fixed first.
self.assertTrue(res, res_expect)
# try with empty
res = load.classes_to_numerical(defaultdict(list), {})
# NOTE(review): same assertTrue-with-message issue — no real comparison happens.
self.assertTrue(res, ({}, defaultdict(int)))
# End-to-end test of load.load_data with every collaborator mocked out:
# fasta loading, class numbering, train/test id splitting, one-hot encoding,
# path joining, per-id dataset construction, pickling and dataset caching.
# Patch decorators are applied bottom-up, so the parameter order below is
# load_dataset, pickle.dump, dataset_from_id, os.path.join, one_hot, then the
# remaining three (build_dataset_ids, classes_to_numerical, load_from_file_fasta)
# arrive as arg2/arg3/arg4.
@patch('VirClass.VirClass.load.load_from_file_fasta')
@patch('VirClass.VirClass.load.classes_to_numerical')
@patch('VirClass.VirClass.load.build_dataset_ids')
@patch('VirClass.VirClass.load.one_hot')
@patch('VirClass.VirClass.load.os.path.join')
@patch('VirClass.VirClass.load.dataset_from_id')
@patch('VirClass.VirClass.load.pickle.dump')
@patch('VirClass.VirClass.load.load_dataset')
def test_load_data(self, load_dataset_mock, pickle_mock, dataset_mock, os_mock, one_hot_mock, arg2, arg3, arg4):
# Argument-validation paths: out-of-range test/sample sizes, bad suffix,
# and missing transmission dictionary must all raise AssertionError.
self.assertRaisesRegex(AssertionError, "Test size is in wrong range - it must be between 0.0 and 1.0.",
load.load_data, filename='a.fasta.gz', test=1.0)
self.assertRaisesRegex(AssertionError, "Test size is in wrong range - it must be between 0.0 and 1.0.",
load.load_data, filename='a.fasta.gz', test=-1.0)
self.assertRaisesRegex(AssertionError, "Sampling size is in wrong range - it must be between 0.0 and 1.0.",
load.load_data, filename='a.fasta.gz', sample=2.0)
self.assertRaisesRegex(AssertionError, "Sampling size is in wrong range - it must be between 0.0 and 1.0.",
load.load_data, filename='a.fasta.gz', sample=-1.0)
self.assertRaisesRegex(AssertionError, "Currently supported suffixes is '.fasta.gz'.",
load.load_data, filename='a.txt')
self.assertRaisesRegex(AssertionError, "Both transmission dictionary and unique nucleotides cannot be empty.",
load.load_data, filename='a.fasta.gz')
# Toy oid -> sequence fixture fed through the mocked load_from_file_fasta.
temp = defaultdict(list)
temp['1004345262'] = \
'TGTTGCGTTAACAACAAACCAACCTCCGACCCAAAACAAAGATGAAAATAAAAGATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAGAAA' \
'GATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAGACGAGGGACCCTCTGACCGACCAACTCACCTACCCAAACTCCCAGGAACC'
temp['10043452'] = \
'GATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGAAGACGAGGGACCCTCTGACCGACCAACTCACCTACCCAAACTCCCAGGAACC' \
'TGTTGCGTTAACAACAAACCAACCTCCGACCCAAAACAAAGATGAAAATAAAAGATGCCACCCAAACGCCGACTAGTGGACAGCCCAGAAGATATGGA'
temp['1023464444'] = \
'AAACACAACAGGGCCTCAAGCCTGTCGCAAAAAGAACAGGTAACAACGACAGGAACGTGGCGGACGAGATACAGACCGGCACGTAAACCCAACCGACAC' \
'ATCCAATATGGTACCCCTCATTGAACCACATAACACAACACAGGCCGCAACTCCGAATACGCATGACAATCACCAAGAATGGGCAAGCTCAATCGCAGCACTCATG'
temp['1028356461'] = \
'CCAATCCCGACCGGAATGGAGGTCCTGACAGGGTACTAAACCCAGTGTAGCGCCCACACGCAATCAGAACAAGACAAAAGCCCCCTAAACCCCACTCCGAAAA' \
'GCGGACAAAAATCCAACCTCATACAAACAAACAAGGGCTAGATGCCAACAGGGACTGCCATCCAATGAGAATGTCCATAGGAGTCGAAACAAAGCCA'
temp['1028356384'] = \
'GAAGCCACCAGAAAGATAAGTGAAACAGTACACGAGCCCTAAACACAACGAATCTTCATAATAACCACCCGACTAAGCGACAAAACCACAGGAACCGACCC' \
'AGACGAAAGCACCGACCAGTGATCACAACTCTTTCGAGGTCACACCCGGTACTACGTAAGTGCCACCATCGCAGCTAAGAGGGCACGCA'
# Labels dict with FEWER keys than `temp` to trigger the key-mismatch assertion.
labels_assert = {'1004345262':
'Viruses;ssRNA viruses;ssRNA negative-strand viruses;Mononegavirales;Bornaviridae;Bornavirus'}
load.load_from_file_fasta.return_value = (temp, labels_assert)
self.assertRaisesRegex(AssertionError,
"When loading from fasta keys in data dictionary and labels dictionary must be same.",
load.load_data, filename='a.fasta.gz', unique_nuc='ATCG')
# Now a complete labels dict matching every key in `temp`.
labels = {'1023464444': 'Viruses;ssDNA viruses;ssDNA negative-strand viruses;ViralesA;ViridaeB;VirusC',
'1028356461': 'Viruses;ssDNA viruses;ssDNA negative-strand viruses;ViridaeB;VirusC',
'10043452': 'Viruses;ssRNA viruses;ssRNA positive-strand viruses;ViralesA;ViridaeB;VirusC',
'1028356384': 'Viruses;ssDNA viruses;ssDNA negative-strand viruses;ViridaeB;VirusC',
'1004345262':
'Viruses;ssRNA viruses;ssRNA negative-strand viruses;Mononegavirales;Bornaviridae;Bornavirus'}
load.load_from_file_fasta.return_value = (temp, labels)
# Canned classes_to_numerical result: oid -> class id plus per-class values.
res_temp = defaultdict(int)
res_temp[0] = 205.0
res_temp[1] = 200.0
res_temp[2] = 190.0
res_temp[3] = 197.5
classes_to_numerical_expected = ({'1023464444': 0, '1028356461': 1, '10043452': 2, '1028356384': 1,
'1004345262': 3}, res_temp)
load.classes_to_numerical.return_value = classes_to_numerical_expected
# Canned train/test/inner-train/inner-test id split.
load.build_dataset_ids.return_value = {'tr_ids': ['1004345262', '10043452', '1028356461', '1028356384'],
'te_ids': ['1023464444'],
'trtr_ids': ['10043452', '1028356461', '1004345262'],
'trte_ids': ['1028356384']}
# Nucleotide -> one-hot row used by the encoder.
trans_dict = {"A": [1, 0, 0, 0], "T": [0, 1, 0, 0], "C": [0, 0, 1, 0], "G": [0, 0, 0, 1]}
# load.dataset_from_id.return_value = {}
# Expected encoded datasets; returned (in te/tr/trte order) by the mocked
# dataset_from_id and then asserted against load_data's output.
dataset_expected = {'teX': [
[1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0,
1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0,
1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1,
0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0,
0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0,
1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0,
1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1,
1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0,
1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0,
1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0,
1, 0, 0, 0]], 'teY': [0], 'trX': [
[0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1,
1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0,
0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0,
1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1,
1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0,
1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1,
0, 1, 0, 0],
[0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1,
1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1,
0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1,
1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1,
0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0,
0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1,
1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0,
1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0,
0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1,
1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0,
1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0,
0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0,
1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 1]], 'trY': [3, 2, 1], 'trteX': [
[0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,
1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0,
1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1,
0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0,
1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0,
0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0,
1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0]], 'trteY': [1]}
dataset_mock.side_effect = [(dataset_expected['teX'], dataset_expected['teY']),
(dataset_expected['trX'], dataset_expected['trY']),
(dataset_expected['trteX'], dataset_expected['trteY'])]
# os.path.join answers: two lookups for the cache dir, then one path per
# pickled sub-dataset file.
os_mock.side_effect = ['dummy', 'dummy', 'a-trX.fasta.gz', 'a-teX.fasta.gz', 'a-trY.fasta.gz', 'a-teY.fasta.gz',
'a-trteX.fasta.gz', 'a-trteY.fasta.gz']
# mock load_dataset without side effect
# mock load_dataset with side effect IOError
# IOError from load_dataset forces load_data to rebuild (and pickle) the
# datasets instead of reading cached ones.
load_dataset_mock.side_effect = IOError()
m_file = mock_open()
with patch('VirClass.VirClass.load.gzip.open', m_file):
res = load.load_data(filename='a.fasta.gz', trans_dict=trans_dict, onehot=False)
# One gzip.open + one pickle.dump per sub-dataset (6 of them).
self.assertEqual(m_file.call_count, 6)
self.assertTrue(isinstance(res, tuple))
self.assertEqual(pickle_mock.call_count, 6)
self.assertDictEqual(res[-1], res_temp)
self.assertTrue(isinstance(res[-2], int))
for idx, dataset_name in enumerate(['trX', 'teX', 'trY', 'teY', 'trteX', 'trteY']):
m_file.assert_any_call('a-' + dataset_name + '.fasta.gz', 'wt')
# NOTE(review): Mock has no `any_call` method — this line merely auto-creates
# a child mock attribute and asserts NOTHING. `pickle_mock.assert_any_call(...)`
# was almost certainly intended; verify the recorded call args (they may be
# numpy arrays, which need a numpy-aware comparison) before switching.
pickle_mock.any_call(dataset_expected[dataset_name], mock.ANY)
np.testing.assert_array_equal(res[idx], np.asarray(dataset_expected[dataset_name]))
# Second pass: same scenario with onehot=True, so the three Y datasets are
# routed through the mocked one_hot encoder.
dataset_mock.side_effect = [(dataset_expected['teX'], dataset_expected['teY']),
(dataset_expected['trX'], dataset_expected['trY']),
(dataset_expected['trteX'], dataset_expected['trteY'])]
os_mock.side_effect = ['dummy', 'dummy', 'a-trX.fasta.gz', 'a-teX.fasta.gz', 'a-trY.fasta.gz', 'a-teY.fasta.gz',
'a-trteX.fasta.gz', 'a-trteY.fasta.gz']
m_file.reset_mock()
pickle_mock.reset_mock()
one_hot_mock.side_effect = (np.array([[0, 0, 0, 1], [0, 0, 1, 0], [0, 1, 0, 0]]),
np.array([[1, 0, 0, 0]]),
np.array([[0, 1, 0, 0]]))
dataset_expected['trY'] = np.array([[0, 0, 0, 1], [0, 0, 1, 0], [0, 1, 0, 0]])
dataset_expected['teY'] = np.array([[1, 0, 0, 0]])
dataset_expected['trteY'] = np.array([[0, 1, 0, 0]])
with patch('VirClass.VirClass.load.gzip.open', m_file):
res = load.load_data(filename='a.fasta.gz', trans_dict=trans_dict, onehot=True)
self.assertEqual(m_file.call_count, 6)
self.assertTrue(isinstance(res, tuple))
self.assertEqual(pickle_mock.call_count, 6)
self.assertDictEqual(res[-1], res_temp)
self.assertTrue(isinstance(res[-2], int))
for idx, dataset_name in enumerate(['trX', 'teX', 'trY', 'teY', 'trteX', 'trteY']):
m_file.assert_any_call('a-' + dataset_name + '.fasta.gz', 'wt')
# NOTE(review): same no-op `any_call` issue as above — should likely be
# pickle_mock.assert_any_call(...).
pickle_mock.any_call(dataset_expected[dataset_name], mock.ANY)
np.testing.assert_array_equal(res[idx], np.asarray(dataset_expected[dataset_name]))
# Standard unittest entry point: run this module's test suite when executed
# directly (main is unittest.main, imported at the top of the file).
if __name__ == '__main__':
main()
| 71.060038
| 120
| 0.481135
| 7,170
| 37,875
| 2.481311
| 0.038075
| 0.294756
| 0.276207
| 0.187061
| 0.791524
| 0.766061
| 0.739981
| 0.709572
| 0.702996
| 0.697375
| 0
| 0.230482
| 0.320238
| 37,875
| 532
| 121
| 71.193609
| 0.460538
| 0.015498
| 0
| 0.46383
| 0
| 0
| 0.183816
| 0.103915
| 0
| 0
| 0
| 0
| 0.174468
| 1
| 0.019149
| false
| 0
| 0.014894
| 0
| 0.03617
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
a040a18bb41b03a411bd051c4e6d1ca9a4f6a1dd
| 129
|
py
|
Python
|
firewood/models/gan/__init__.py
|
kynk94/torch-firewood
|
8ecd03c166bcadaae22a6cb2c1457a82f2c644eb
|
[
"MIT"
] | 1
|
2022-03-26T12:51:27.000Z
|
2022-03-26T12:51:27.000Z
|
firewood/models/gan/__init__.py
|
kynk94/torch-firewood
|
8ecd03c166bcadaae22a6cb2c1457a82f2c644eb
|
[
"MIT"
] | null | null | null |
firewood/models/gan/__init__.py
|
kynk94/torch-firewood
|
8ecd03c166bcadaae22a6cb2c1457a82f2c644eb
|
[
"MIT"
] | null | null | null |
from . import DCGAN, GAN, LSGAN, PGGAN, pix2pix, pix2pixHD
# Public API of this package: the re-exported GAN model submodules only.
__all__ = ["DCGAN", "GAN", "LSGAN", "PGGAN", "pix2pix", "pix2pixHD"]
| 32.25
| 68
| 0.658915
| 15
| 129
| 5.4
| 0.6
| 0.197531
| 0.320988
| 0.444444
| 0.839506
| 0.839506
| 0
| 0
| 0
| 0
| 0
| 0.036036
| 0.139535
| 129
| 3
| 69
| 43
| 0.693694
| 0
| 0
| 0
| 0
| 0
| 0.263566
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
a0974a2b78590a2106815485f16e96f892842194
| 3,195
|
py
|
Python
|
systemdlint/systemdlint/conf/knownPaths.py
|
priv-kweihmann/systemdlint
|
d9909d2e2d970599bb2015e2a667d4debf063384
|
[
"BSD-2-Clause"
] | 17
|
2019-06-10T00:40:04.000Z
|
2021-12-27T21:26:52.000Z
|
systemdlint/systemdlint/conf/knownPaths.py
|
priv-kweihmann/systemdlint
|
d9909d2e2d970599bb2015e2a667d4debf063384
|
[
"BSD-2-Clause"
] | 53
|
2019-04-26T14:56:13.000Z
|
2021-12-21T09:52:05.000Z
|
systemdlint/systemdlint/conf/knownPaths.py
|
priv-kweihmann/systemdlint
|
d9909d2e2d970599bb2015e2a667d4debf063384
|
[
"BSD-2-Clause"
] | 2
|
2020-05-28T11:20:41.000Z
|
2021-12-27T21:07:32.000Z
|
# Glob patterns covering every location where systemd unit files may live:
# for each base directory, a one-level ("/*") and a recursive ("/**/*") match.
KNOWN_PATHS = [
    base + tail
    for base in ("/etc/systemd", "/lib/systemd", "/run/systemd", "/usr/lib/systemd")
    for tail in ("/*", "/**/*")
]
# Drop-in override search paths keyed by the unit-file name or suffix they
# apply to. "%unit%" is a placeholder substituted with the concrete unit name
# (presumably by the lookup code — confirm against the caller). Only three
# distinct path templates exist, differing in the subdirectory under each
# systemd base directory.

# Top-level config files and network devices: no subdirectory.
_PLAIN_DROPINS = [
    "/etc/systemd/%unit%.d/*.conf",
    "/run/systemd/%unit%.d/*.conf",
    "/usr/lib/systemd/%unit%.d/*.conf",
    "/lib/systemd/%unit%.d/*.conf",
]
# Regular units: "system/" subdirectory under /etc and /run only.
_SYSTEM_DROPINS = [
    "/etc/systemd/system/%unit%.d/*.conf",
    "/run/systemd/system/%unit%.d/*.conf",
    "/usr/lib/systemd/%unit%.d/*.conf",
    "/lib/systemd/%unit%.d/*.conf",
]
# networkd units: "network/" subdirectory under /etc and /run only.
_NETWORK_DROPINS = [
    "/etc/systemd/network/%unit%.d/*.conf",
    "/run/systemd/network/%unit%.d/*.conf",
    "/usr/lib/systemd/%unit%.d/*.conf",
    "/lib/systemd/%unit%.d/*.conf",
]

# Assemble in the same key order as before; list(...) gives every key its own
# list object, exactly like the original per-key literals.
KNOWN_DROPIN_PATHS = {}
for _key in ("user.conf", "system.conf"):
    KNOWN_DROPIN_PATHS[_key] = list(_PLAIN_DROPINS)
for _key in (".service", ".target", ".mount", ".automount", ".device",
             ".swap", ".path", ".timer", ".scope", ".slice"):
    KNOWN_DROPIN_PATHS[_key] = list(_SYSTEM_DROPINS)
KNOWN_DROPIN_PATHS[".network"] = list(_NETWORK_DROPINS)
for _key in (".netdev", "timesyncd.conf", "journald.conf", "journal-upload.conf"):
    KNOWN_DROPIN_PATHS[_key] = list(_PLAIN_DROPINS)
| 49.153846
| 154
| 0.538654
| 425
| 3,195
| 4.042353
| 0.068235
| 0.197905
| 0.356228
| 0.428405
| 0.869616
| 0.846333
| 0.846333
| 0.846333
| 0.846333
| 0.846333
| 0
| 0
| 0.137715
| 3,195
| 64
| 155
| 49.921875
| 0.623593
| 0
| 0
| 0.52381
| 0
| 0
| 0.753365
| 0.672613
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
262876d8c6708b71339ea205054725268f995e0a
| 49
|
py
|
Python
|
torch_interpolations/__init__.py
|
sbarratt/torch_interpolations
|
91f0c9127dd20747daade34e96747426e2183fbf
|
[
"Apache-2.0"
] | 84
|
2020-07-16T20:09:48.000Z
|
2021-12-13T17:29:20.000Z
|
torch_interpolations/__init__.py
|
sbarratt/torch_interpolations
|
91f0c9127dd20747daade34e96747426e2183fbf
|
[
"Apache-2.0"
] | 3
|
2021-02-22T10:58:39.000Z
|
2021-06-18T16:15:32.000Z
|
torch_interpolations/__init__.py
|
sbarratt/torch_interpolations
|
91f0c9127dd20747daade34e96747426e2183fbf
|
[
"Apache-2.0"
] | 8
|
2020-07-28T14:45:50.000Z
|
2021-12-21T09:16:45.000Z
|
from .multilinear import RegularGridInterpolator
| 24.5
| 48
| 0.897959
| 4
| 49
| 11
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.977778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2633d6417a3f4e2a115b1dba314b45a11550f627
| 57,954
|
py
|
Python
|
biserici_inlemnite/biserici/migrations/0014_historicalcomponentaartistica_historicalfinisaj_historicalfinisajactualinvelitoare_historicalfinisaj.py
|
ck-tm/biserici-inlemnite
|
c9d12127b92f25d3ab2fcc7b4c386419fe308a4e
|
[
"MIT"
] | null | null | null |
biserici_inlemnite/biserici/migrations/0014_historicalcomponentaartistica_historicalfinisaj_historicalfinisajactualinvelitoare_historicalfinisaj.py
|
ck-tm/biserici-inlemnite
|
c9d12127b92f25d3ab2fcc7b4c386419fe308a4e
|
[
"MIT"
] | null | null | null |
biserici_inlemnite/biserici/migrations/0014_historicalcomponentaartistica_historicalfinisaj_historicalfinisajactualinvelitoare_historicalfinisaj.py
|
ck-tm/biserici-inlemnite
|
c9d12127b92f25d3ab2fcc7b4c386419fe308a4e
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.13 on 2021-07-30 12:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import simple_history.models
class Migration(migrations.Migration):
dependencies = [
('nomenclatoare', '0005_boltapestealtar_historicalboltapestealtar_historicaltipboltapestealtar_historicaltipboltapronaos_tip'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('biserici', '0013_auto_20210730_1553'),
]
operations = [
migrations.CreateModel(
name='HistoricalFotografii',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('biserica', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.biserica')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografii',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieTurn',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie turn',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieTalpa',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie talpa',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieStreasina',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie streasina',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografiePortalPronaos',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie portal pronaos',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografiePortalNaos',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie portal naos',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografiePortal',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie portal',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografiePisanieInscriptieCtitorMester',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie pisanie inscriptie ctitor mester',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieObiectCult',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie obiect cult',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieMobilierCandelabre',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie mobilier candelabre',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieInvelitoare',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie invelitoare',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieInteriorDesfasurat',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie interior desfasurat',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieIconostasNaos',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie iconostas naos',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieIconostasAltar',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie iconostas altar',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieIcoana',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie icoana',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieFereastra',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie fereastra',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieFatada',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie fatada',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieDetaliuPod',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('detaliu', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.detaliupodturn')),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie detaliu pod',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieDetaliuBolta',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie detaliu bolta',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieDegradariPod',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie degradari pod',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieDegradariInterior',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie degradari interior',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieDegradariExterioare',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie degradari exterioare',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieCruceBiserica',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie cruce biserica',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieCheotoar',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie cheotoar',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFotografieAnsamblu',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('poza', models.TextField(max_length=250)),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('fotografii', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.fotografii')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical fotografie ansamblu',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFinisajTavan',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('observatii', models.TextField(blank=True, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('element', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.elementbiserica')),
('finisaj', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.finisaj')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('material', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.finisaj')),
],
options={
'verbose_name': 'historical finisaj tavan',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFinisajTamburTurn',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('sindrila_lungime', models.IntegerField(blank=True, null=True)),
('sindrila_latime_medie', models.IntegerField(blank=True, null=True)),
('sindrila_grosime_medie', models.IntegerField(blank=True, null=True)),
('sindrila_pasul_latuirii', models.IntegerField(blank=True, null=True)),
('sindrila_pasul_baterii', models.IntegerField(blank=True, null=True)),
('sindrila_numar_straturi', models.IntegerField(blank=True, null=True)),
('sindrila_cu_horj', models.BooleanField(default=False)),
('sindrila_cu_tesitura', models.BooleanField(default=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('finisaj', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.finisaj')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('material', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.finisajexterior')),
('sindrlia_esenta_lemnoasa', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.esentalemnoasa')),
('sindrlia_forma_botului', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipbotsindrila')),
('sindrlia_prelucrare', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipprelucraresindrila')),
('sindrlia_tipul_de_batere', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipbateresindrila')),
('sindrlia_tipul_prindere', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipprinderesindrila')),
],
options={
'verbose_name': 'historical finisaj tambur turn',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFinisajPeretiInterior',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('observatii', models.TextField(blank=True, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('element', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.elementbiserica')),
('finisaj', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.finisaj')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('material', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.materialfinisajperetiinteriori')),
],
options={
'verbose_name': 'historical finisaj pereti interior',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFinisajPardosea',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('observatii', models.TextField(blank=True, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('element', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.elementbiserica')),
('finisaj', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.finisaj')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('material', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.materialfinisajpardoseli')),
],
options={
'verbose_name': 'historical finisaj pardosea',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFinisajInvelitoareTurn',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('sindrila_lungime', models.IntegerField(blank=True, null=True)),
('sindrila_latime_medie', models.IntegerField(blank=True, null=True)),
('sindrila_grosime_medie', models.IntegerField(blank=True, null=True)),
('sindrila_pasul_latuirii', models.IntegerField(blank=True, null=True)),
('sindrila_pasul_baterii', models.IntegerField(blank=True, null=True)),
('sindrila_numar_straturi', models.IntegerField(blank=True, null=True)),
('sindrila_cu_horj', models.BooleanField(default=False)),
('sindrila_cu_tesitura', models.BooleanField(default=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('finisaj', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.finisaj')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('material', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.finisajexterior')),
('sindrlia_esenta_lemnoasa', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.esentalemnoasa')),
('sindrlia_forma_botului', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipbotsindrila')),
('sindrlia_prelucrare', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipprelucraresindrila')),
('sindrlia_tipul_de_batere', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipbateresindrila')),
('sindrlia_tipul_prindere', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipprinderesindrila')),
],
options={
'verbose_name': 'historical finisaj invelitoare turn',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFinisajBolti',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('observatii', models.TextField(blank=True, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('element', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.elementbiserica')),
('finisaj', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.finisaj')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('material', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.finisaj')),
],
options={
'verbose_name': 'historical finisaj bolti',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFinisajAnteriorInvelitoare',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('etape_anterioare_vizibile', models.BooleanField(default=False)),
('sindrila_pasul_latuirii', models.IntegerField(blank=True, null=True)),
('sindrila_numar_straturi', models.IntegerField(blank=True, null=True)),
('sindrila_cu_horj', models.BooleanField(default=False)),
('sindrila_cu_tesitura', models.BooleanField(default=False)),
('alte_tipuri_invelitoare', models.TextField(blank=True, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('finisaj', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.finisaj')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('sindrlia_esenta_lemnoasa', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.esentalemnoasa')),
('sindrlia_forma_botului', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipbotsindrila')),
('sindrlia_tipul_de_batere', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipbateresindrila')),
],
options={
'verbose_name': 'historical finisaj anterior invelitoare',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFinisajActualInvelitoare',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('sindrila_lungime', models.IntegerField(blank=True, null=True)),
('sindrila_latime_medie', models.IntegerField(blank=True, null=True)),
('sindrila_grosime_medie', models.IntegerField(blank=True, null=True)),
('sindrila_pasul_latuirii', models.IntegerField(blank=True, null=True)),
('sindrila_pasul_baterii', models.IntegerField(blank=True, null=True)),
('sindrila_numar_straturi', models.IntegerField(blank=True, null=True)),
('sindrila_cu_horj', models.BooleanField(default=False)),
('sindrila_cu_tesitura', models.BooleanField(default=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('finisaj', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.finisaj')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('material', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.finisajexterior')),
('sindrlia_esenta_lemnoasa', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.esentalemnoasa')),
('sindrlia_forma_botului', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipbotsindrila')),
('sindrlia_prelucrare', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipprelucraresindrila')),
('sindrlia_tipul_de_batere', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipbateresindrila')),
('sindrlia_tipul_prindere', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.tipprinderesindrila')),
],
options={
'verbose_name': 'historical finisaj actual invelitoare',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalFinisaj',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('biserica', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.biserica')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical finisaj',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalComponentaArtistica',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('biserica', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.biserica')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical componenta artistica',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
]
| 73.826752
| 213
| 0.616109
| 5,832
| 57,954
| 5.907407
| 0.036523
| 0.048299
| 0.041043
| 0.064496
| 0.946447
| 0.942326
| 0.942326
| 0.942326
| 0.942326
| 0.942326
| 0
| 0.00716
| 0.228802
| 57,954
| 784
| 214
| 73.920918
| 0.763682
| 0.000794
| 0
| 0.843188
| 1
| 0
| 0.20181
| 0.061548
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005141
| 0
| 0.008997
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
263ff7947eb09a018892e094c9af6c1727b618b1
| 4,938
|
py
|
Python
|
py-rest-client/test/test_product_relations_api.py
|
ausseabed/product-catalogue
|
3deca0301298efe22d7b61028683254ac3bd3c66
|
[
"Apache-2.0"
] | null | null | null |
py-rest-client/test/test_product_relations_api.py
|
ausseabed/product-catalogue
|
3deca0301298efe22d7b61028683254ac3bd3c66
|
[
"Apache-2.0"
] | 4
|
2022-02-13T10:48:44.000Z
|
2022-03-02T21:22:04.000Z
|
py-rest-client/test/test_product_relations_api.py
|
ausseabed/product-catalogue
|
3deca0301298efe22d7b61028683254ac3bd3c66
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
AusSeabed product catalogue
The API description for the Ausseabed product catalogue inventory # noqa: E501
The version of the OpenAPI document: 0.2.2
Contact: AusSeabed@ga.gov.au
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import product_catalogue_py_rest_client
from product_catalogue_py_rest_client.api.product_relations_api import ProductRelationsApi # noqa: E501
from product_catalogue_py_rest_client.rest import ApiException
class TestProductRelationsApi(unittest.TestCase):
"""ProductRelationsApi unit test stubs"""
def setUp(self):
self.api = product_catalogue_py_rest_client.api.product_relations_api.ProductRelationsApi() # noqa: E501
def tearDown(self):
pass
def test_product_relations_controller_create_compilation(self):
"""Test case for product_relations_controller_create_compilation
"""
pass
def test_product_relations_controller_create_l0_survey(self):
"""Test case for product_relations_controller_create_l0_survey
"""
pass
def test_product_relations_controller_create_l2_survey(self):
"""Test case for product_relations_controller_create_l2_survey
"""
pass
def test_product_relations_controller_create_l3_survey(self):
"""Test case for product_relations_controller_create_l3_survey
"""
pass
def test_product_relations_controller_delete_compilation(self):
"""Test case for product_relations_controller_delete_compilation
"""
pass
def test_product_relations_controller_delete_l0_survey(self):
"""Test case for product_relations_controller_delete_l0_survey
"""
pass
def test_product_relations_controller_delete_l2_survey(self):
"""Test case for product_relations_controller_delete_l2_survey
"""
pass
def test_product_relations_controller_delete_l3_survey(self):
"""Test case for product_relations_controller_delete_l3_survey
"""
pass
def test_product_relations_controller_find_all_l0_survey(self):
"""Test case for product_relations_controller_find_all_l0_survey
"""
pass
def test_product_relations_controller_find_all_l2_survey(self):
"""Test case for product_relations_controller_find_all_l2_survey
"""
pass
def test_product_relations_controller_find_all_l3_compilation(self):
"""Test case for product_relations_controller_find_all_l3_compilation
"""
pass
def test_product_relations_controller_find_all_l3_survey(self):
"""Test case for product_relations_controller_find_all_l3_survey
"""
pass
def test_product_relations_controller_find_conditional_compilation(self):
"""Test case for product_relations_controller_find_conditional_compilation
"""
pass
def test_product_relations_controller_find_conditional_l0_survey(self):
"""Test case for product_relations_controller_find_conditional_l0_survey
"""
pass
def test_product_relations_controller_find_conditional_l2_survey(self):
"""Test case for product_relations_controller_find_conditional_l2_survey
"""
pass
def test_product_relations_controller_find_conditional_l3_survey(self):
"""Test case for product_relations_controller_find_conditional_l3_survey
"""
pass
def test_product_relations_controller_find_one_compilation(self):
"""Test case for product_relations_controller_find_one_compilation
"""
pass
def test_product_relations_controller_find_one_l0_survey(self):
"""Test case for product_relations_controller_find_one_l0_survey
"""
pass
def test_product_relations_controller_find_one_l2_survey(self):
"""Test case for product_relations_controller_find_one_l2_survey
"""
pass
def test_product_relations_controller_find_one_l3_survey(self):
"""Test case for product_relations_controller_find_one_l3_survey
"""
pass
def test_product_relations_controller_update_compilation(self):
"""Test case for product_relations_controller_update_compilation
"""
pass
def test_product_relations_controller_update_l0_survey(self):
"""Test case for product_relations_controller_update_l0_survey
"""
pass
def test_product_relations_controller_update_l2_survey(self):
"""Test case for product_relations_controller_update_l2_survey
"""
pass
def test_product_relations_controller_update_l3_survey(self):
"""Test case for product_relations_controller_update_l3_survey
"""
pass
if __name__ == '__main__':
unittest.main()
| 27.586592
| 113
| 0.730458
| 577
| 4,938
| 5.712305
| 0.116118
| 0.242718
| 0.378641
| 0.131068
| 0.860437
| 0.851942
| 0.838592
| 0.776396
| 0.56068
| 0.175971
| 0
| 0.013127
| 0.213244
| 4,938
| 178
| 114
| 27.741573
| 0.835264
| 0.415148
| 0
| 0.416667
| 0
| 0
| 0.002927
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.433333
| false
| 0.416667
| 0.083333
| 0
| 0.533333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
cd1aa872b14f15c69687b69d62736a5238558024
| 118
|
py
|
Python
|
backend/test/test_factory.py
|
lkoehl/doppelkopf
|
fe80fc10cd8296da2a4ab97afb502eb69e50fb9b
|
[
"MIT"
] | 38
|
2018-02-14T19:28:00.000Z
|
2022-03-30T07:28:12.000Z
|
backend/test/test_factory.py
|
lkoehl/doppelkopf
|
fe80fc10cd8296da2a4ab97afb502eb69e50fb9b
|
[
"MIT"
] | 57
|
2018-10-01T10:54:32.000Z
|
2022-02-08T23:26:40.000Z
|
backend/test/test_factory.py
|
AlexRogalskiy/doppelkopf
|
cbfc95d56f102ab8e01e194603e9f4abf09e2e14
|
[
"MIT"
] | 11
|
2018-12-17T17:02:31.000Z
|
2022-03-10T17:47:07.000Z
|
from doppelkopf import create_app
def test_config():
assert create_app({"FOO": True}).config.get("FOO") is True
| 19.666667
| 62
| 0.720339
| 18
| 118
| 4.555556
| 0.722222
| 0.219512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144068
| 118
| 5
| 63
| 23.6
| 0.811881
| 0
| 0
| 0
| 0
| 0
| 0.050847
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cd842c2ca02a5f1b522ae7b08d2e31e5a695360e
| 132,117
|
py
|
Python
|
utools/maya/widgets/validator_res.py
|
theiviaxx/utools
|
6501e52ad77edb2beab8f33028460f32b3231b80
|
[
"BSD-3-Clause"
] | 1
|
2019-09-25T22:47:02.000Z
|
2019-09-25T22:47:02.000Z
|
utools/maya/widgets/validator_res.py
|
theiviaxx/utools
|
6501e52ad77edb2beab8f33028460f32b3231b80
|
[
"BSD-3-Clause"
] | 4
|
2015-12-16T22:14:59.000Z
|
2015-12-16T22:18:04.000Z
|
utools/maya/widgets/validator_res.py
|
theiviaxx/utools
|
6501e52ad77edb2beab8f33028460f32b3231b80
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Sat Sep 5 14:57:44 2015
# by: The Resource Compiler for PySide (Qt v4.8.5)
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore
qt_resource_data = "\x00\x00I\x8f\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x000\x00\x00\x000\x08\x06\x00\x00\x00W\x02\xf9\x87\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x0aOiCCPPhotoshop ICC profile\x00\x00x\xda\x9dSgTS\xe9\x16=\xf7\xde\xf4BK\x88\x80\x94KoR\x15\x08 RB\x8b\x80\x14\x91&*!\x09\x10J\x88!\xa1\xd9\x15Q\xc1\x11EE\x04\x1b\xc8\xa0\x88\x03\x8e\x8e\x80\x8c\x15Q,\x0c\x8a\x0a\xd8\x07\xe4!\xa2\x8e\x83\xa3\x88\x8a\xca\xfb\xe1{\xa3k\xd6\xbc\xf7\xe6\xcd\xfe\xb5\xd7>\xe7\xac\xf3\x9d\xb3\xcf\x07\xc0\x08\x0c\x96H3Q5\x80\x0c\xa9B\x1e\x11\xe0\x83\xc7\xc4\xc6\xe1\xe4.@\x81\x0a$p\x00\x10\x08\xb3d!s\xfd#\x01\x00\xf8~<<+\x22\xc0\x07\xbe\x00\x01x\xd3\x0b\x08\x00\xc0M\x9b\xc00\x1c\x87\xff\x0f\xeaB\x99\x5c\x01\x80\x84\x01\xc0t\x918K\x08\x80\x14\x00@z\x8eB\xa6\x00@F\x01\x80\x9d\x98&S\x00\xa0\x04\x00`\xcbcb\xe3\x00P-\x00`'\x7f\xe6\xd3\x00\x80\x9d\xf8\x99{\x01\x00[\x94!\x15\x01\xa0\x91\x00 \x13e\x88D\x00h;\x00\xac\xcfV\x8aE\x00X0\x00\x14fK\xc49\x00\xd8-\x000IWfH\x00\xb0\xb7\x00\xc0\xce\x10\x0b\xb2\x00\x08\x0c\x000Q\x88\x85)\x00\x04{\x00`\xc8##x\x00\x84\x99\x00\x14F\xf2W<\xf1+\xae\x10\xe7*\x00\x00x\x99\xb2<\xb9$9E\x81[\x08-q\x07WW.\x1e(\xceI\x17+\x146a\x02a\x9a@.\xc2y\x99\x192\x814\x0f\xe0\xf3\xcc\x00\x00\xa0\x91\x15\x11\xe0\x83\xf3\xfdx\xce\x0e\xae\xce\xce6\x8e\xb6\x0e_-\xea\xbf\x06\xff\x22bb\xe3\xfe\xe5\xcf\xabp@\x00\x00\xe1t~\xd1\xfe,/\xb3\x1a\x80;\x06\x80m\xfe\xa2%\xee\x04h^\x0b\xa0u\xf7\x8bf\xb2\x0f@\xb5\x00\xa0\xe9\xdaW\xf3p\xf8~<<E\xa1\x90\xb9\xd9\xd9\xe5\xe4\xe4\xd8J\xc4B[a\xcaW}\xfeg\xc2_\xc0W\xfdl\xf9~<\xfc\xf7\xf5\xe0\xbe\xe2$\x812]\x81G\x04\xf8\xe0\xc2\xcc\xf4L\xa5\x1c\xcf\x92\x09\x84b\xdc\xe6\x8fG\xfc\xb7\x0b\xff\xfc\x1d\xd3\x22\xc4Ib\xb9X*\x14\xe3Q\x12q\x8eD\x9a\x8c\xf32\xa5\x22\x89B\x92)\xc5%\xd2\xffd\xe2\xdf,\xfb\x03>\xdf5\x00\xb0j>\x01{\x91-\xa8]c\x03\xf6K'\x10Xt\xc0\xe2\xf7\x00\x00\xf2\xbbo\xc1\xd4(\x08\x03\x80h\x83\xe1\xcfw\xff\xef?\xfdG\xa0%\x00\x80fI\x92q\x00\x00^D$.T\xca\xb3?\xc7\x08\x00\x00D\xa0\x81*\xb0A\x1b\xf4
\xc1\x18,\xc0\x06\x1c\xc1\x05\xdc\xc1\x0b\xfc`6\x84B$\xc4\xc2B\x10B\x0ad\x80\x1cr`)\xac\x82B(\x86\xcd\xb0\x1d*`/\xd4@\x1d4\xc0Qh\x86\x93p\x0e.\xc2U\xb8\x0e=p\x0f\xfaa\x08\x9e\xc1(\xbc\x81\x09\x04A\xc8\x08\x13a!\xda\x88\x01b\x8aX#\x8e\x08\x17\x99\x85\xf8!\xc1H\x04\x12\x8b$ \xc9\x88\x14Q\x22K\x915H1R\x8aT UH\x1d\xf2=r\x029\x87\x5cF\xba\x91;\xc8\x002\x82\xfc\x86\xbcG1\x94\x81\xb2Q=\xd4\x0c\xb5C\xb9\xa87\x1a\x84F\xa2\x0b\xd0dt1\x9a\x8f\x16\xa0\x9b\xd0r\xb4\x1a=\x8c6\xa1\xe7\xd0\xabh\x0f\xda\x8f>C\xc70\xc0\xe8\x18\x073\xc4l0.\xc6\xc3B\xb18,\x09\x93c\xcb\xb1\x22\xac\x0c\xab\xc6\x1a\xb0V\xac\x03\xbb\x89\xf5c\xcf\xb1w\x04\x12\x81E\xc0\x096\x04wB a\x1eAHXLXN\xd8H\xa8 \x1c$4\x11\xda\x097\x09\x03\x84Q\xc2'\x22\x93\xa8K\xb4&\xba\x11\xf9\xc4\x18b21\x87XH,#\xd6\x12\x8f\x13/\x10{\x88C\xc47$\x12\x89C2'\xb9\x90\x02I\xb1\xa4T\xd2\x12\xd2F\xd2nR#\xe9,\xa9\x9b4H\x1a#\x93\xc9\xdadk\xb2\x079\x94, +\xc8\x85\xe4\x9d\xe4\xc3\xe43\xe4\x1b\xe4!\xf2[\x0a\x9db@q\xa4\xf8S\xe2(R\xcajJ\x19\xe5\x10\xe54\xe5\x06e\x982AU\xa3\x9aR\xdd\xa8\xa1T\x115\x8fZB\xad\xa1\xb6R\xafQ\x87\xa8\x134u\x9a9\xcd\x83\x16IK\xa5\xad\xa2\x95\xd3\x1ah\x17h\xf7i\xaf\xe8t\xba\x11\xdd\x95\x1eN\x97\xd0W\xd2\xcb\xe9G\xe8\x97\xe8\x03\xf4w\x0c\x0d\x86\x15\x83\xc7\x88g(\x19\x9b\x18\x07\x18g\x19w\x18\xaf\x98L\xa6\x19\xd3\x8b\x19\xc7T071\xeb\x98\xe7\x99\x0f\x99oUX*\xb6*|\x15\x91\xca\x0a\x95J\x95&\x95\x1b*/T\xa9\xaa\xa6\xaa\xde\xaa\x0bU\xf3U\xcbT\x8f\xa9^S}\xaeFU3S\xe3\xa9\x09\xd4\x96\xabU\xaa\x9dP\xebS\x1bSg\xa9;\xa8\x87\xaag\xa8oT?\xa4~Y\xfd\x89\x06Y\xc3L\xc3OC\xa4Q\xa0\xb1_\xe3\xbc\xc6 
\x0bc\x19\xb3x,!k\x0d\xab\x86u\x815\xc4&\xb1\xcd\xd9|v*\xbb\x98\xfd\x1d\xbb\x8b=\xaa\xa9\xa19C3J3W\xb3R\xf3\x94f?\x07\xe3\x98q\xf8\x9ctN\x09\xe7(\xa7\x97\xf3~\x8a\xde\x14\xef)\xe2)\x1b\xa64L\xb91e\x5ck\xaa\x96\x97\x96X\xabH\xabQ\xabG\xeb\xbd6\xae\xed\xa7\x9d\xa6\xbdE\xbbY\xfb\x81\x0eA\xc7J'\x5c'Gg\x8f\xce\x05\x9d\xe7S\xd9S\xdd\xa7\x0a\xa7\x16M=:\xf5\xae.\xaak\xa5\x1b\xa1\xbbDw\xbfn\xa7\xee\x98\x9e\xbe^\x80\x9eLo\xa7\xdey\xbd\xe7\xfa\x1c}/\xfdT\xfdm\xfa\xa7\xf5G\x0cX\x06\xb3\x0c$\x06\xdb\x0c\xce\x18<\xc55qo<\x1d/\xc7\xdb\xf1QC]\xc3@C\xa5a\x95a\x97\xe1\x84\x91\xb9\xd1<\xa3\xd5F\x8dF\x0f\x8ci\xc6\x5c\xe3$\xe3m\xc6m\xc6\xa3&\x06&!&KM\xeaM\xee\x9aRM\xb9\xa6)\xa6;L;L\xc7\xcd\xcc\xcd\xa2\xcd\xd6\x995\x9b=1\xd72\xe7\x9b\xe7\x9b\xd7\x9b\xdf\xb7`ZxZ,\xb6\xa8\xb6\xb8eI\xb2\xe4Z\xa6Y\xee\xb6\xbcn\x85Z9Y\xa5XUZ]\xb3F\xad\x9d\xad%\xd6\xbb\xad\xbb\xa7\x11\xa7\xb9N\x93N\xab\x9e\xd6g\xc3\xb0\xf1\xb6\xc9\xb6\xa9\xb7\x19\xb0\xe5\xd8\x06\xdb\xae\xb6m\xb6}agb\x17g\xb7\xc5\xae\xc3\xee\x93\xbd\x93}\xba}\x8d\xfd=\x07\x0d\x87\xd9\x0e\xab\x1dZ\x1d~s\xb4r\x14:V:\xde\x9a\xce\x9c\xee?}\xc5\xf4\x96\xe9/gX\xcf\x10\xcf\xd83\xe3\xb6\x13\xcb)\xc4i\x9dS\x9b\xd3Gg\x17g\xb9s\x83\xf3\x88\x8b\x89K\x82\xcb.\x97>.\x9b\x1b\xc6\xdd\xc8\xbd\xe4Jt\xf5q]\xe1z\xd2\xf5\x9d\x9b\xb3\x9b\xc2\xed\xa8\xdb\xaf\xee6\xeei\xee\x87\xdc\x9f\xcc4\x9f)\x9eY3s\xd0\xc3\xc8C\xe0Q\xe5\xd1?\x0b\x9f\x950k\xdf\xac~OCO\x81g\xb5\xe7#/c/\x91W\xad\xd7\xb0\xb7\xa5w\xaa\xf7a\xef\x17>\xf6>r\x9f\xe3>\xe3<7\xde2\xdeY_\xcc7\xc0\xb7\xc8\xb7\xcbO\xc3o\x9e_\x85\xdfC\x7f#\xffd\xffz\xff\xd1\x00\xa7\x80%\x01g\x03\x89\x81A\x81[\x02\xfb\xf8z|!\xbf\x8e?:\xdbe\xf6\xb2\xd9\xedA\x8c\xa0\xb9A\x15A\x8f\x82\xad\x82\xe5\xc1\xad!h\xc8\xec\x90\xad!\xf7\xe7\x98\xce\x91\xcei\x0e\x85P~\xe8\xd6\xd0\x07a\xe6a\x8b\xc3~\x0c'\x85\x87\x85W\x86?\x8ep\x88X\x1a\xd11\x975w\xd1\xdcCs\xdfD\xfaD\x96D\xde\x9bg1O9\xaf-J5*>\xaa.j<\xda7\xba4\xba?\xc6.fY\xcc\xd5X\x9dXIlK\x1c9.*\xae6nl\xbe\xdf\xfc\xed\xf3\x87\xe2\x9d\xe2\x0b\xe3{\x17\x98/\xc8]py\xa1\xce\xc2\xf4\x85\xa7\x16\xa9.\x12
,:\x96@L\x88N8\x94\xf0A\x10*\xa8\x16\x8c%\xf2\x13w%\x8e\x0ay\xc2\x1d\xc2g\x22/\xd16\xd1\x88\xd8C\x5c*\x1eN\xf2H*Mz\x92\xec\x91\xbc5y$\xc53\xa5,\xe5\xb9\x84'\xa9\x90\xbcL\x0dL\xdd\x9b:\x9e\x16\x9av m2=:\xbd1\x83\x92\x91\x90qB\xaa!M\x93\xb6g\xeag\xe6fv\xcb\xace\x85\xb2\xfe\xc5n\x8b\xb7/\x1e\x95\x07\xc9k\xb3\x90\xac\x05Y-\x0a\xb6B\xa6\xe8TZ(\xd7*\x07\xb2geWf\xbf\xcd\x89\xca9\x96\xab\x9e+\xcd\xed\xcc\xb3\xca\xdb\x907\x9c\xef\x9f\xff\xed\x12\xc2\x12\xe1\x92\xb6\xa5\x86KW-\x1dX\xe6\xbd\xacj9\xb2<qy\xdb\x0a\xe3\x15\x05+\x86V\x06\xac<\xb8\x8a\xb6*m\xd5O\xab\xedW\x97\xae~\xbd&zMk\x81^\xc1\xca\x82\xc1\xb5\x01k\xeb\x0bU\x0a\xe5\x85}\xeb\xdc\xd7\xed]OX/Y\xdf\xb5a\xfa\x86\x9d\x1b>\x15\x89\x8a\xae\x14\xdb\x17\x97\x15\x7f\xd8(\xdcx\xe5\x1b\x87o\xca\xbf\x99\xdc\x94\xb4\xa9\xab\xc4\xb9d\xcff\xd2f\xe9\xe6\xde-\x9e[\x0e\x96\xaa\x97\xe6\x97\x0en\x0d\xd9\xda\xb4\x0d\xdfV\xb4\xed\xf5\xf6E\xdb/\x97\xcd(\xdb\xbb\x83\xb6C\xb9\xa3\xbf<\xb8\xbce\xa7\xc9\xce\xcd;?T\xa4T\xf4T\xfaT6\xee\xd2\xdd\xb5a\xd7\xf8n\xd1\xee\x1b{\xbc\xf64\xec\xd5\xdb[\xbc\xf7\xfd>\xc9\xbe\xdbU\x01UM\xd5f\xd5e\xfbI\xfb\xb3\xf7?\xae\x89\xaa\xe9\xf8\x96\xfbm]\xadNmq\xed\xc7\x03\xd2\x03\xfd\x07#\x0e\xb6\xd7\xb9\xd4\xd5\x1d\xd2=TR\x8f\xd6+\xebG\x0e\xc7\x1f\xbe\xfe\x9d\xefw-\x0d6\x0dU\x8d\x9c\xc6\xe2#pDy\xe4\xe9\xf7\x09\xdf\xf7\x1e\x0d:\xdav\x8c{\xac\xe1\x07\xd3\x1fv\x1dg\x1d/jB\x9a\xf2\x9aF\x9bS\x9a\xfb[b[\xbaO\xcc>\xd1\xd6\xea\xdez\xfcG\xdb\x1f\x0f\x9c4<YyJ\xf3T\xc9i\xda\xe9\x82\xd3\x93g\xf2\xcf\x8c\x9d\x95\x9d}~.\xf9\xdc`\xdb\xa2\xb6{\xe7c\xce\xdfj\x0fo\xef\xba\x10t\xe1\xd2E\xff\x8b\xe7;\xbc;\xce\x5c\xf2\xb8t\xf2\xb2\xdb\xe5\x13W\xb8W\x9a\xaf:_m\xeat\xea<\xfe\x93\xd3O\xc7\xbb\x9c\xbb\x9a\xae\xb9\x5ck\xb9\xeez\xbd\xb5{f\xf7\xe9\x1b\x9e7\xce\xdd\xf4\xbdy\xf1\x16\xff\xd6\xd5\x9e9=\xdd\xbd\xf3zo\xf7\xc5\xf7\xf5\xdf\x16\xdd~r'\xfd\xce\xcb\xbb\xd9w'\xee\xad\xbcO\xbc_\xf4@\xedA\xd9C\xdd\x87\xd5?[\xfe\xdc\xd8\xef\xdc\x7fj\xc0w\xa0\xf3\xd1\xdcG\xf7\x06\x85\x83\xcf\xfe\x91\xf5\x8f\x0fC\x05\x8f\x99\x8f\xcb\x86\x0d\x86\xeb\x9e8>99\xe2?
r\xfd\xe9\xfc\xa7C\xcfd\xcf&\x9e\x17\xfe\xa2\xfe\xcb\xae\x17\x16/~\xf8\xd5\xeb\xd7\xce\xd1\x98\xd1\xa1\x97\xf2\x97\x93\xbfm|\xa5\xfd\xea\xc0\xeb\x19\xaf\xdb\xc6\xc2\xc6\x1e\xbe\xc9x31^\xf4V\xfb\xed\xc1w\xdcw\x1d\xef\xa3\xdf\x0fO\xe4| \x7f(\xffh\xf9\xb1\xf5S\xd0\xa7\xfb\x93\x19\x93\x93\xff\x04\x03\x98\xf3\xfcc3-\xdb\x00\x00;\xdciTXtXML:com.adobe.xmp\x00\x00\x00\x00\x00<?xpacket begin=\x22\xef\xbb\xbf\x22 id=\x22W5M0MpCehiHzreSzNTczkc9d\x22?>\x0a<x:xmpmeta xmlns:x=\x22adobe:ns:meta/\x22 x:xmptk=\x22Adobe XMP Core 5.6-c067 79.157747, 2015/03/30-23:40:42 \x22>\x0a <rdf:RDF xmlns:rdf=\x22http://www.w3.org/1999/02/22-rdf-syntax-ns#\x22>\x0a <rdf:Description rdf:about=\x22\x22\x0a xmlns:xmp=\x22http://ns.adobe.com/xap/1.0/\x22\x0a xmlns:dc=\x22http://purl.org/dc/elements/1.1/\x22\x0a xmlns:photoshop=\x22http://ns.adobe.com/photoshop/1.0/\x22\x0a xmlns:xmpMM=\x22http://ns.adobe.com/xap/1.0/mm/\x22\x0a xmlns:stEvt=\x22http://ns.adobe.com/xap/1.0/sType/ResourceEvent#\x22\x0a xmlns:tiff=\x22http://ns.adobe.com/tiff/1.0/\x22\x0a xmlns:exif=\x22http://ns.adobe.com/exif/1.0/\x22>\x0a <xmp:CreatorTool>Adobe Photoshop CC 2015 (Windows)</xmp:CreatorTool>\x0a <xmp:CreateDate>2015-08-29T23:39:59-07:00</xmp:CreateDate>\x0a <xmp:ModifyDate>2015-09-04T21:48:31-07:00</xmp:ModifyDate>\x0a <xmp:MetadataDate>2015-09-04T21:48:31-07:00</xmp:MetadataDate>\x0a <dc:format>image/png</dc:format>\x0a <photoshop:ColorMode>3</photoshop:ColorMode>\x0a <photoshop:ICCProfile>sRGB IEC61966-2.1</photoshop:ICCProfile>\x0a <xmpMM:InstanceID>xmp.iid:4305d315-0640-b141-988d-d6d3599473f2</xmpMM:InstanceID>\x0a <xmpMM:DocumentID>xmp.did:6394dc35-fb49-0a43-89e7-d6429c67ab3b</xmpMM:DocumentID>\x0a <xmpMM:OriginalDocumentID>xmp.did:6394dc35-fb49-0a43-89e7-d6429c67ab3b</xmpMM:OriginalDocumentID>\x0a <xmpMM:History>\x0a <rdf:Seq>\x0a <rdf:li rdf:parseType=\x22Resource\x22>\x0a <stEvt:action>created</stEvt:action>\x0a <stEvt:instanceID>xmp.iid:6394dc35-fb49-0a43-89e7-d6429c67ab3b</stEvt:instanceID>\x0a 
<stEvt:when>2015-08-29T23:39:59-07:00</stEvt:when>\x0a <stEvt:softwareAgent>Adobe Photoshop CC 2015 (Windows)</stEvt:softwareAgent>\x0a </rdf:li>\x0a <rdf:li rdf:parseType=\x22Resource\x22>\x0a <stEvt:action>saved</stEvt:action>\x0a <stEvt:instanceID>xmp.iid:18e69211-a666-2a45-a753-722ead29318a</stEvt:instanceID>\x0a <stEvt:when>2015-09-04T13:46:14-07:00</stEvt:when>\x0a <stEvt:softwareAgent>Adobe Photoshop CC 2015 (Windows)</stEvt:softwareAgent>\x0a <stEvt:changed>/</stEvt:changed>\x0a </rdf:li>\x0a <rdf:li rdf:parseType=\x22Resource\x22>\x0a <stEvt:action>saved</stEvt:action>\x0a <stEvt:instanceID>xmp.iid:4305d315-0640-b141-988d-d6d3599473f2</stEvt:instanceID>\x0a <stEvt:when>2015-09-04T21:48:31-07:00</stEvt:when>\x0a <stEvt:softwareAgent>Adobe Photoshop CC 2015 (Windows)</stEvt:softwareAgent>\x0a <stEvt:changed>/</stEvt:changed>\x0a </rdf:li>\x0a </rdf:Seq>\x0a </xmpMM:History>\x0a <tiff:Orientation>1</tiff:Orientation>\x0a <tiff:XResolution>720000/10000</tiff:XResolution>\x0a <tiff:YResolution>720000/10000</tiff:YResolution>\x0a <tiff:ResolutionUnit>2</tiff:ResolutionUnit>\x0a <exif:ColorSpace>1</exif:ColorSpace>\x0a <exif:PixelXDimension>48</exif:PixelXDimension>\x0a <exif:PixelYDimension>48</exif:PixelYDimension>\x0a </rdf:Description>\x0a </rdf:RDF>\x0a</x:xmpmeta>\x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a<?xpacket end=\x22w\x22?>\x15mG\xba\x00\x00\x00 
cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x02\xd2IDATx\xda\xec\x99\xbfk\x1aa\x18\xc7=\x84\xeab\x22\x0a\x0e=\x12\x08\xc9G\xd2%\x19\x0c\x04\xe9\x18H\x84,v\xf1\x1fph\x1dtr\x91\x0cu\x88\xe2\xe4\x12h]\x0c\x99\xe2&\x1228\x94R\x88\xd0!\x85Xh\x13\xa14C\xdbT\x1aR\xdaB\x0b\x91\xb4y\xba\x18(ik\xee\xce;S\xad\x0f<\xcbqw\xef\xfb\xb9{\xdf\xef\xfb\xfc\xb0\x89\x88\xad\x9f\xdd6\x04\x18\x02\x5c3\x00`\x03F\x81\x10\xb0\x02\x94\x80]\xa0\x09\xb4\xda\xdel_+\xb5\xef\x09\xb5\x9f\xb9^\x00 \x08d&gf\xf7\xd4pD\xbc\x89\x94\xb8r\x05q\x16+b/\xd7D\xa9\xd6E\xa9\xd6\xc5^\xae\x89\xb3X\x11W\xae \xdeDJ\xd4pD&gf\xf7\x80\x0c\x10\xec9\x00\x10\x00\xf2c\x8b\xcb'\x9edZ\x1c\xeb[b{\xf4B\x97;\xd6\xb7\xc4\x93L\xcb\xd8\xe2\xf2\x09\x90\x07\x02\x96\x03\x00\x0a\x10\x1b_Xj\xb8SY\xb1\x97k\xba'~\xd9\xed\xe5\x9a\xb8SY\x19_Xj\x001@\xb1\x04\x00P\xf1\xfb\xf3\xbeh\x5c\x1c\x1b\xdb]O\xfc\xb7?\xb2\xb1-\xbeh\x5c\xf0\xfb\xf3\x80j*\x000517\xbf\xe9NeM\x9f\xf8ew\xa7\xb2217\xbf\x09L\x99\x02\x00\xa8\x13s\xf3\x9b#\xabk\x7f\x1c\xb0|\xfcEn\xee4L\x85\x18Y]\xbb\x80P\xbb\x02\x00\x14\xfc\xfe|\xa7//\x22\xf2\xf9\xec\xbb\xdc;x/\x8a\xc9\x7f\xa2\xbd\x9c\x94n\x00b\xbeh\xbc\xe3@\xbf\xda\xce\xa7or\xeb\xe9+\xd3 |\xd1\xb8\x001C\x00@`|a\xa9q\xd5\x86\xbdl\xad\x1f\xe7\x92><\x96\x1b\x8f_\x9a\xb2\xb1\xdb\xea\x140\x02\x90\xd7\xb2i\xfff\xfb_O\xe5\xf6\xb3Cs\x96\x12\xe4u\x01\x00\xc1\xb1\xc5\xe5\x13-:\xdf\xc9\xceE\xe4\xe1\xbb\x8f2\xf2d\xbf\xabs\xa2}\xd8\x05\xf5\x00d<\xc9\xb4\xa6\x01\xb4\xd8\xd1\xe9\x99\xdcy\xfe\xc60\x84'\x99\x16 
\xa3\x09\x00\x18\x9d\x9c\x99\xdd\xd3\x1a\x1e\xe81\xa3\x92\xebX\xdf\xba\x88\x9dF\xb5\x00\x84\xd4pD\xf3\xcb\xf5\x9aQ\xc9U\xc3\x11\x01BZ\x00V\xbc\x89\x94e\x00F%\xd7\x9bH\x09\xb0\xa2\x05\xa0\xe4\xca\x15,\x07\xd0+\xb9\xae\x5cA\x80\x92\x16\x80]g\xb1\xd2\x13\x00=\x92\xeb,V\x04\xd8\xd5\x02\xd0\xd4\x13&\x9beWI\xae\xbd\x5c\x13\xa0\xa9\x05\xa0\xa5T\xeb=\x07\xb8Jr\x95j]\x80\xd6\x7f\x01pmK\xe8\xc1[s\x96P\xdfo\xe2\x9e\xca\xe8\xfd\xd7\x1fL\x97\xd1\x9e\x1dd\xd3\x16\x1dd\x96\x87\x12w\x0f\x8e,\x0d%\xfa;\x98\xeb\xfbp\xda\xec\x84\xa6\x934Z\x96\xd0\xf4}J\xd9mR\xafU\x1a-M\xea\x8d\x96U\xa6\xff\x95\xb2\x8a\xde\xc2\x96\x11i\xb4\xbc\xb0\xd5\xf7\xa5\xc5\x81(\xee\x0eDy} \x1a\x1c\x03\xd1b\x1a\x98&\xdf\xc0\xb4Y\x87\x9d\xfa!\xc0\x00\x00\xfc\x1c\x00\x8b\xfe7\xf2)i0p\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x00w\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x18\x00\x00\x00\x18\x08\x04\x00\x00\x00J~\xf5s\x00\x00\x00>IDAT8\xcbc`\x18\xac\xe0\x7f\xe0\xff\x97\xff\xf1\x81W\xff\xfdQ5\xe0W\x0e\x02/Q5\x00\x01^\x17\xa0\xcb\x8fj\x18\xd50p\x1a^\x11L\xde/P5\xf8\x13\xc8\x11/\xfe\xfb\x0e\xda\xdc\x0f\x00\x01\x17\xfb\x91Q@\x9d\x1a\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00;\x14\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x000\x00\x00\x000\x08\x04\x00\x00\x00\xfd\x0b1\x0c\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x009\xeciTXtXML:com.adobe.xmp\x00\x00\x00\x00\x00<?xpacket begin=\x22\xef\xbb\xbf\x22 id=\x22W5M0MpCehiHzreSzNTczkc9d\x22?>\x0a<x:xmpmeta xmlns:x=\x22adobe:ns:meta/\x22 x:xmptk=\x22Adobe XMP Core 5.6-c067 79.157747, 2015/03/30-23:40:42 \x22>\x0a <rdf:RDF xmlns:rdf=\x22http://www.w3.org/1999/02/22-rdf-syntax-ns#\x22>\x0a <rdf:Description rdf:about=\x22\x22\x0a xmlns:xmp=\x22http://ns.adobe.com/xap/1.0/\x22\x0a xmlns:dc=\x22http://purl.org/dc/elements/1.1/\x22\x0a xmlns:photoshop=\x22http://ns.adobe.com/photoshop/1.0/\x22\x0a xmlns:xmpMM=\x22http://ns.adobe.com/xap/1.0/mm/\x22\x0a xmlns:stEvt=\x22http://ns.adobe.com/xap/1.0/sType/ResourceEvent#\x22\x0a 
xmlns:tiff=\x22http://ns.adobe.com/tiff/1.0/\x22\x0a xmlns:exif=\x22http://ns.adobe.com/exif/1.0/\x22>\x0a <xmp:CreatorTool>Adobe Photoshop CC 2015 (Windows)</xmp:CreatorTool>\x0a <xmp:CreateDate>2015-08-29T23:39:59-07:00</xmp:CreateDate>\x0a <xmp:ModifyDate>2015-09-04T22:18:46-07:00</xmp:ModifyDate>\x0a <xmp:MetadataDate>2015-09-04T22:18:46-07:00</xmp:MetadataDate>\x0a <dc:format>image/png</dc:format>\x0a <photoshop:ColorMode>1</photoshop:ColorMode>\x0a <xmpMM:InstanceID>xmp.iid:fb28de83-1ac8-7444-afaf-4a8f1fc517b2</xmpMM:InstanceID>\x0a <xmpMM:DocumentID>adobe:docid:photoshop:8e6d904a-538d-11e5-864d-c7d3ee06def3</xmpMM:DocumentID>\x0a <xmpMM:OriginalDocumentID>xmp.did:cd0e85a1-45b4-194b-830d-0e19aa21d4b3</xmpMM:OriginalDocumentID>\x0a <xmpMM:History>\x0a <rdf:Seq>\x0a <rdf:li rdf:parseType=\x22Resource\x22>\x0a <stEvt:action>created</stEvt:action>\x0a <stEvt:instanceID>xmp.iid:cd0e85a1-45b4-194b-830d-0e19aa21d4b3</stEvt:instanceID>\x0a <stEvt:when>2015-08-29T23:39:59-07:00</stEvt:when>\x0a <stEvt:softwareAgent>Adobe Photoshop CC 2015 (Windows)</stEvt:softwareAgent>\x0a </rdf:li>\x0a <rdf:li rdf:parseType=\x22Resource\x22>\x0a <stEvt:action>saved</stEvt:action>\x0a <stEvt:instanceID>xmp.iid:fb28de83-1ac8-7444-afaf-4a8f1fc517b2</stEvt:instanceID>\x0a <stEvt:when>2015-09-04T22:18:46-07:00</stEvt:when>\x0a <stEvt:softwareAgent>Adobe Photoshop CC 2015 (Windows)</stEvt:softwareAgent>\x0a <stEvt:changed>/</stEvt:changed>\x0a </rdf:li>\x0a </rdf:Seq>\x0a </xmpMM:History>\x0a <tiff:Orientation>1</tiff:Orientation>\x0a <tiff:XResolution>720000/10000</tiff:XResolution>\x0a <tiff:YResolution>720000/10000</tiff:YResolution>\x0a <tiff:ResolutionUnit>2</tiff:ResolutionUnit>\x0a <exif:ColorSpace>65535</exif:ColorSpace>\x0a <exif:PixelXDimension>48</exif:PixelXDimension>\x0a <exif:PixelYDimension>48</exif:PixelYDimension>\x0a </rdf:Description>\x0a </rdf:RDF>\x0a</x:xmpmeta>\x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a 
\x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a<?xpacket end=\x22w\x22?>\x9f\xb7l\x19\x00\x00\x00 cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x00\xa2IDATx\xda\xec\xd61\x12\xc20\x0cD\xd1\x7f-\x13.\xc6\x0c\x85\xe1\xc8p\x80\xa5\xa2H\x22\xa7\xda\xed$\xf7\xfbf\xac\x91eD\xf6\xd0@\x03\x0d4`\x01\x0045)\xcb\x02hJRM8\xae\xe8\xa1\x7f\xcd\x0cp\xd7wMx\xae\xe8\x82p5yI\x98\x00\xd0V\x136\x00\xb4\xe9s&\x8c@MX\x81\x8a0\x03\xa0\xdb\x9e\xb0\x03G\x22\x01\x8c(\xb0\x8f\xf77yD\x9b|\x8e\xf7\x0e\xda\x88\x0eZ\x1d\xef{\xec\x16\xf1\xae\xe7z\x19\xefY8\x17\xf1\x1e\xe0\x19^\x99\xa0wt\xe9\x03\xe8\x15\xfc\xb6\xf4\xd7\xb1\x81\x06\x1a\x88\x03\xbf\x01\x00<\x8d\x9e\x0c1\x96\xdf\xe9\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x00\xc7\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x000\x00\x00\x000\x08\x04\x00\x00\x00\xfd\x0b1\x0c\x00\x00\x00\x8eIDATX\x85\xed\xd3\xbb\x0d\x80 \x14\x85a:K\xc7\xf1\xb9\x98\x1dk\xb9\x81\xe3\xf8\x18\xe0\xd8\x10\x83\x80\x15\xf7\x181\xf7\xf4|\x7f\x02\xc1\x18\x9dN\xa7{w\x181q\xf9\x03\x80\xe5\xf2 %<\x9e\x11(\x9c\x1f\x94\xff/\xbf\x7f\x95G\x8d\x19\x0d\x93_\x00\xac\xcf\x89<\xbe\xc2\xe2\x8en\xe9D\xf6\xdd\xc3^\xc7\x13\x09\xf4\x02O{K\xb4\xe2\xfcsB\x8c\x8f\x12\x9d8\x1f'\xc4\xf9 
\xb1\x93~\xad\x97`\xf0\x89\x844\x1f$\x18\xbc\x97`\xf1.\xc1\xe4u:]\x01;\x01\xd3\x80\x9c\x13\xb7\xca\xc0k\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00L\x0e\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x008\x00\x00\x008\x08\x06\x00\x00\x00\xa8\x86;\x1e\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x0aOiCCPPhotoshop ICC profile\x00\x00x\xda\x9dSgTS\xe9\x16=\xf7\xde\xf4BK\x88\x80\x94KoR\x15\x08 RB\x8b\x80\x14\x91&*!\x09\x10J\x88!\xa1\xd9\x15Q\xc1\x11EE\x04\x1b\xc8\xa0\x88\x03\x8e\x8e\x80\x8c\x15Q,\x0c\x8a\x0a\xd8\x07\xe4!\xa2\x8e\x83\xa3\x88\x8a\xca\xfb\xe1{\xa3k\xd6\xbc\xf7\xe6\xcd\xfe\xb5\xd7>\xe7\xac\xf3\x9d\xb3\xcf\x07\xc0\x08\x0c\x96H3Q5\x80\x0c\xa9B\x1e\x11\xe0\x83\xc7\xc4\xc6\xe1\xe4.@\x81\x0a$p\x00\x10\x08\xb3d!s\xfd#\x01\x00\xf8~<<+\x22\xc0\x07\xbe\x00\x01x\xd3\x0b\x08\x00\xc0M\x9b\xc00\x1c\x87\xff\x0f\xeaB\x99\x5c\x01\x80\x84\x01\xc0t\x918K\x08\x80\x14\x00@z\x8eB\xa6\x00@F\x01\x80\x9d\x98&S\x00\xa0\x04\x00`\xcbcb\xe3\x00P-\x00`'\x7f\xe6\xd3\x00\x80\x9d\xf8\x99{\x01\x00[\x94!\x15\x01\xa0\x91\x00 
\x13e\x88D\x00h;\x00\xac\xcfV\x8aE\x00X0\x00\x14fK\xc49\x00\xd8-\x000IWfH\x00\xb0\xb7\x00\xc0\xce\x10\x0b\xb2\x00\x08\x0c\x000Q\x88\x85)\x00\x04{\x00`\xc8##x\x00\x84\x99\x00\x14F\xf2W<\xf1+\xae\x10\xe7*\x00\x00x\x99\xb2<\xb9$9E\x81[\x08-q\x07WW.\x1e(\xceI\x17+\x146a\x02a\x9a@.\xc2y\x99\x192\x814\x0f\xe0\xf3\xcc\x00\x00\xa0\x91\x15\x11\xe0\x83\xf3\xfdx\xce\x0e\xae\xce\xce6\x8e\xb6\x0e_-\xea\xbf\x06\xff\x22bb\xe3\xfe\xe5\xcf\xabp@\x00\x00\xe1t~\xd1\xfe,/\xb3\x1a\x80;\x06\x80m\xfe\xa2%\xee\x04h^\x0b\xa0u\xf7\x8bf\xb2\x0f@\xb5\x00\xa0\xe9\xdaW\xf3p\xf8~<<E\xa1\x90\xb9\xd9\xd9\xe5\xe4\xe4\xd8J\xc4B[a\xcaW}\xfeg\xc2_\xc0W\xfdl\xf9~<\xfc\xf7\xf5\xe0\xbe\xe2$\x812]\x81G\x04\xf8\xe0\xc2\xcc\xf4L\xa5\x1c\xcf\x92\x09\x84b\xdc\xe6\x8fG\xfc\xb7\x0b\xff\xfc\x1d\xd3\x22\xc4Ib\xb9X*\x14\xe3Q\x12q\x8eD\x9a\x8c\xf32\xa5\x22\x89B\x92)\xc5%\xd2\xffd\xe2\xdf,\xfb\x03>\xdf5\x00\xb0j>\x01{\x91-\xa8]c\x03\xf6K'\x10Xt\xc0\xe2\xf7\x00\x00\xf2\xbbo\xc1\xd4(\x08\x03\x80h\x83\xe1\xcfw\xff\xef?\xfdG\xa0%\x00\x80fI\x92q\x00\x00^D$.T\xca\xb3?\xc7\x08\x00\x00D\xa0\x81*\xb0A\x1b\xf4\xc1\x18,\xc0\x06\x1c\xc1\x05\xdc\xc1\x0b\xfc`6\x84B$\xc4\xc2B\x10B\x0ad\x80\x1cr`)\xac\x82B(\x86\xcd\xb0\x1d*`/\xd4@\x1d4\xc0Qh\x86\x93p\x0e.\xc2U\xb8\x0e=p\x0f\xfaa\x08\x9e\xc1(\xbc\x81\x09\x04A\xc8\x08\x13a!\xda\x88\x01b\x8aX#\x8e\x08\x17\x99\x85\xf8!\xc1H\x04\x12\x8b$ \xc9\x88\x14Q\x22K\x915H1R\x8aT UH\x1d\xf2=r\x029\x87\x5cF\xba\x91;\xc8\x002\x82\xfc\x86\xbcG1\x94\x81\xb2Q=\xd4\x0c\xb5C\xb9\xa87\x1a\x84F\xa2\x0b\xd0dt1\x9a\x8f\x16\xa0\x9b\xd0r\xb4\x1a=\x8c6\xa1\xe7\xd0\xabh\x0f\xda\x8f>C\xc70\xc0\xe8\x18\x073\xc4l0.\xc6\xc3B\xb18,\x09\x93c\xcb\xb1\x22\xac\x0c\xab\xc6\x1a\xb0V\xac\x03\xbb\x89\xf5c\xcf\xb1w\x04\x12\x81E\xc0\x096\x04wB a\x1eAHXLXN\xd8H\xa8 \x1c$4\x11\xda\x097\x09\x03\x84Q\xc2'\x22\x93\xa8K\xb4&\xba\x11\xf9\xc4\x18b21\x87XH,#\xd6\x12\x8f\x13/\x10{\x88C\xc47$\x12\x89C2'\xb9\x90\x02I\xb1\xa4T\xd2\x12\xd2F\xd2nR#\xe9,\xa9\x9b4H\x1a#\x93\xc9\xdadk\xb2\x079\x94, 
+\xc8\x85\xe4\x9d\xe4\xc3\xe43\xe4\x1b\xe4!\xf2[\x0a\x9db@q\xa4\xf8S\xe2(R\xcajJ\x19\xe5\x10\xe54\xe5\x06e\x982AU\xa3\x9aR\xdd\xa8\xa1T\x115\x8fZB\xad\xa1\xb6R\xafQ\x87\xa8\x134u\x9a9\xcd\x83\x16IK\xa5\xad\xa2\x95\xd3\x1ah\x17h\xf7i\xaf\xe8t\xba\x11\xdd\x95\x1eN\x97\xd0W\xd2\xcb\xe9G\xe8\x97\xe8\x03\xf4w\x0c\x0d\x86\x15\x83\xc7\x88g(\x19\x9b\x18\x07\x18g\x19w\x18\xaf\x98L\xa6\x19\xd3\x8b\x19\xc7T071\xeb\x98\xe7\x99\x0f\x99oUX*\xb6*|\x15\x91\xca\x0a\x95J\x95&\x95\x1b*/T\xa9\xaa\xa6\xaa\xde\xaa\x0bU\xf3U\xcbT\x8f\xa9^S}\xaeFU3S\xe3\xa9\x09\xd4\x96\xabU\xaa\x9dP\xebS\x1bSg\xa9;\xa8\x87\xaag\xa8oT?\xa4~Y\xfd\x89\x06Y\xc3L\xc3OC\xa4Q\xa0\xb1_\xe3\xbc\xc6 \x0bc\x19\xb3x,!k\x0d\xab\x86u\x815\xc4&\xb1\xcd\xd9|v*\xbb\x98\xfd\x1d\xbb\x8b=\xaa\xa9\xa19C3J3W\xb3R\xf3\x94f?\x07\xe3\x98q\xf8\x9ctN\x09\xe7(\xa7\x97\xf3~\x8a\xde\x14\xef)\xe2)\x1b\xa64L\xb91e\x5ck\xaa\x96\x97\x96X\xabH\xabQ\xabG\xeb\xbd6\xae\xed\xa7\x9d\xa6\xbdE\xbbY\xfb\x81\x0eA\xc7J'\x5c'Gg\x8f\xce\x05\x9d\xe7S\xd9S\xdd\xa7\x0a\xa7\x16M=:\xf5\xae.\xaak\xa5\x1b\xa1\xbbDw\xbfn\xa7\xee\x98\x9e\xbe^\x80\x9eLo\xa7\xdey\xbd\xe7\xfa\x1c}/\xfdT\xfdm\xfa\xa7\xf5G\x0cX\x06\xb3\x0c$\x06\xdb\x0c\xce\x18<\xc55qo<\x1d/\xc7\xdb\xf1QC]\xc3@C\xa5a\x95a\x97\xe1\x84\x91\xb9\xd1<\xa3\xd5F\x8dF\x0f\x8ci\xc6\x5c\xe3$\xe3m\xc6m\xc6\xa3&\x06&!&KM\xeaM\xee\x9aRM\xb9\xa6)\xa6;L;L\xc7\xcd\xcc\xcd\xa2\xcd\xd6\x995\x9b=1\xd72\xe7\x9b\xe7\x9b\xd7\x9b\xdf\xb7`ZxZ,\xb6\xa8\xb6\xb8eI\xb2\xe4Z\xa6Y\xee\xb6\xbcn\x85Z9Y\xa5XUZ]\xb3F\xad\x9d\xad%\xd6\xbb\xad\xbb\xa7\x11\xa7\xb9N\x93N\xab\x9e\xd6g\xc3\xb0\xf1\xb6\xc9\xb6\xa9\xb7\x19\xb0\xe5\xd8\x06\xdb\xae\xb6m\xb6}agb\x17g\xb7\xc5\xae\xc3\xee\x93\xbd\x93}\xba}\x8d\xfd=\x07\x0d\x87\xd9\x0e\xab\x1dZ\x1d~s\xb4r\x14:V:\xde\x9a\xce\x9c\xee?}\xc5\xf4\x96\xe9/gX\xcf\x10\xcf\xd83\xe3\xb6\x13\xcb)\xc4i\x9dS\x9b\xd3Gg\x17g\xb9s\x83\xf3\x88\x8b\x89K\x82\xcb.\x97>.\x9b\x1b\xc6\xdd\xc8\xbd\xe4Jt\xf5q]\xe1z\xd2\xf5\x9d\x9b\xb3\x9b\xc2\xed\xa8\xdb\xaf\xee6\xeei\xee\x87\xdc\x9f\xcc4\x9f)\x9eY3s\xd0\xc3\xc8C\xe0Q\xe5
\xd1?\x0b\x9f\x950k\xdf\xac~OCO\x81g\xb5\xe7#/c/\x91W\xad\xd7\xb0\xb7\xa5w\xaa\xf7a\xef\x17>\xf6>r\x9f\xe3>\xe3<7\xde2\xdeY_\xcc7\xc0\xb7\xc8\xb7\xcbO\xc3o\x9e_\x85\xdfC\x7f#\xffd\xffz\xff\xd1\x00\xa7\x80%\x01g\x03\x89\x81A\x81[\x02\xfb\xf8z|!\xbf\x8e?:\xdbe\xf6\xb2\xd9\xedA\x8c\xa0\xb9A\x15A\x8f\x82\xad\x82\xe5\xc1\xad!h\xc8\xec\x90\xad!\xf7\xe7\x98\xce\x91\xcei\x0e\x85P~\xe8\xd6\xd0\x07a\xe6a\x8b\xc3~\x0c'\x85\x87\x85W\x86?\x8ep\x88X\x1a\xd11\x975w\xd1\xdcCs\xdfD\xfaD\x96D\xde\x9bg1O9\xaf-J5*>\xaa.j<\xda7\xba4\xba?\xc6.fY\xcc\xd5X\x9dXIlK\x1c9.*\xae6nl\xbe\xdf\xfc\xed\xf3\x87\xe2\x9d\xe2\x0b\xe3{\x17\x98/\xc8]py\xa1\xce\xc2\xf4\x85\xa7\x16\xa9.\x12,:\x96@L\x88N8\x94\xf0A\x10*\xa8\x16\x8c%\xf2\x13w%\x8e\x0ay\xc2\x1d\xc2g\x22/\xd16\xd1\x88\xd8C\x5c*\x1eN\xf2H*Mz\x92\xec\x91\xbc5y$\xc53\xa5,\xe5\xb9\x84'\xa9\x90\xbcL\x0dL\xdd\x9b:\x9e\x16\x9av m2=:\xbd1\x83\x92\x91\x90qB\xaa!M\x93\xb6g\xeag\xe6fv\xcb\xace\x85\xb2\xfe\xc5n\x8b\xb7/\x1e\x95\x07\xc9k\xb3\x90\xac\x05Y-\x0a\xb6B\xa6\xe8TZ(\xd7*\x07\xb2geWf\xbf\xcd\x89\xca9\x96\xab\x9e+\xcd\xed\xcc\xb3\xca\xdb\x907\x9c\xef\x9f\xff\xed\x12\xc2\x12\xe1\x92\xb6\xa5\x86KW-\x1dX\xe6\xbd\xacj9\xb2<qy\xdb\x0a\xe3\x15\x05+\x86V\x06\xac<\xb8\x8a\xb6*m\xd5O\xab\xedW\x97\xae~\xbd&zMk\x81^\xc1\xca\x82\xc1\xb5\x01k\xeb\x0bU\x0a\xe5\x85}\xeb\xdc\xd7\xed]OX/Y\xdf\xb5a\xfa\x86\x9d\x1b>\x15\x89\x8a\xae\x14\xdb\x17\x97\x15\x7f\xd8(\xdcx\xe5\x1b\x87o\xca\xbf\x99\xdc\x94\xb4\xa9\xab\xc4\xb9d\xcff\xd2f\xe9\xe6\xde-\x9e[\x0e\x96\xaa\x97\xe6\x97\x0en\x0d\xd9\xda\xb4\x0d\xdfV\xb4\xed\xf5\xf6E\xdb/\x97\xcd(\xdb\xbb\x83\xb6C\xb9\xa3\xbf<\xb8\xbce\xa7\xc9\xce\xcd;?T\xa4T\xf4T\xfaT6\xee\xd2\xdd\xb5a\xd7\xf8n\xd1\xee\x1b{\xbc\xf64\xec\xd5\xdb[\xbc\xf7\xfd>\xc9\xbe\xdbU\x01UM\xd5f\xd5e\xfbI\xfb\xb3\xf7?\xae\x89\xaa\xe9\xf8\x96\xfbm]\xadNmq\xed\xc7\x03\xd2\x03\xfd\x07#\x0e\xb6\xd7\xb9\xd4\xd5\x1d\xd2=TR\x8f\xd6+\xebG\x0e\xc7\x1f\xbe\xfe\x9d\xefw-\x0d6\x0dU\x8d\x9c\xc6\xe2#pDy\xe4\xe9\xf7\x09\xdf\xf7\x1e\x0d:\xdav\x8c{\xac\xe1\x07\xd3\x1fv\x1dg\x1d/jB\x9
a\xf2\x9aF\x9bS\x9a\xfb[b[\xbaO\xcc>\xd1\xd6\xea\xdez\xfcG\xdb\x1f\x0f\x9c4<YyJ\xf3T\xc9i\xda\xe9\x82\xd3\x93g\xf2\xcf\x8c\x9d\x95\x9d}~.\xf9\xdc`\xdb\xa2\xb6{\xe7c\xce\xdfj\x0fo\xef\xba\x10t\xe1\xd2E\xff\x8b\xe7;\xbc;\xce\x5c\xf2\xb8t\xf2\xb2\xdb\xe5\x13W\xb8W\x9a\xaf:_m\xeat\xea<\xfe\x93\xd3O\xc7\xbb\x9c\xbb\x9a\xae\xb9\x5ck\xb9\xeez\xbd\xb5{f\xf7\xe9\x1b\x9e7\xce\xdd\xf4\xbdy\xf1\x16\xff\xd6\xd5\x9e9=\xdd\xbd\xf3zo\xf7\xc5\xf7\xf5\xdf\x16\xdd~r'\xfd\xce\xcb\xbb\xd9w'\xee\xad\xbcO\xbc_\xf4@\xedA\xd9C\xdd\x87\xd5?[\xfe\xdc\xd8\xef\xdc\x7fj\xc0w\xa0\xf3\xd1\xdcG\xf7\x06\x85\x83\xcf\xfe\x91\xf5\x8f\x0fC\x05\x8f\x99\x8f\xcb\x86\x0d\x86\xeb\x9e8>99\xe2?r\xfd\xe9\xfc\xa7C\xcfd\xcf&\x9e\x17\xfe\xa2\xfe\xcb\xae\x17\x16/~\xf8\xd5\xeb\xd7\xce\xd1\x98\xd1\xa1\x97\xf2\x97\x93\xbfm|\xa5\xfd\xea\xc0\xeb\x19\xaf\xdb\xc6\xc2\xc6\x1e\xbe\xc9x31^\xf4V\xfb\xed\xc1w\xdcw\x1d\xef\xa3\xdf\x0fO\xe4| \x7f(\xffh\xf9\xb1\xf5S\xd0\xa7\xfb\x93\x19\x93\x93\xff\x04\x03\x98\xf3\xfcc3-\xdb\x00\x00:0iTXtXML:com.adobe.xmp\x00\x00\x00\x00\x00<?xpacket begin=\x22\xef\xbb\xbf\x22 id=\x22W5M0MpCehiHzreSzNTczkc9d\x22?>\x0a<x:xmpmeta xmlns:x=\x22adobe:ns:meta/\x22 x:xmptk=\x22Adobe XMP Core 5.6-c067 79.157747, 2015/03/30-23:40:42 \x22>\x0a <rdf:RDF xmlns:rdf=\x22http://www.w3.org/1999/02/22-rdf-syntax-ns#\x22>\x0a <rdf:Description rdf:about=\x22\x22\x0a xmlns:xmp=\x22http://ns.adobe.com/xap/1.0/\x22\x0a xmlns:xmpMM=\x22http://ns.adobe.com/xap/1.0/mm/\x22\x0a xmlns:stEvt=\x22http://ns.adobe.com/xap/1.0/sType/ResourceEvent#\x22\x0a xmlns:dc=\x22http://purl.org/dc/elements/1.1/\x22\x0a xmlns:photoshop=\x22http://ns.adobe.com/photoshop/1.0/\x22\x0a xmlns:tiff=\x22http://ns.adobe.com/tiff/1.0/\x22\x0a xmlns:exif=\x22http://ns.adobe.com/exif/1.0/\x22>\x0a <xmp:CreatorTool>Adobe Photoshop CC 2015 (Windows)</xmp:CreatorTool>\x0a <xmp:CreateDate>2015-09-05T14:53:23-07:00</xmp:CreateDate>\x0a <xmp:MetadataDate>2015-09-05T14:53:23-07:00</xmp:MetadataDate>\x0a 
<xmp:ModifyDate>2015-09-05T14:53:23-07:00</xmp:ModifyDate>\x0a <xmpMM:InstanceID>xmp.iid:32b73671-fcfd-484c-931d-39a3014fc796</xmpMM:InstanceID>\x0a <xmpMM:DocumentID>adobe:docid:photoshop:825e69d1-5418-11e5-864d-c7d3ee06def3</xmpMM:DocumentID>\x0a <xmpMM:OriginalDocumentID>xmp.did:4b570fd7-1d75-224b-a44d-ebedaeec12fb</xmpMM:OriginalDocumentID>\x0a <xmpMM:History>\x0a <rdf:Seq>\x0a <rdf:li rdf:parseType=\x22Resource\x22>\x0a <stEvt:action>created</stEvt:action>\x0a <stEvt:instanceID>xmp.iid:4b570fd7-1d75-224b-a44d-ebedaeec12fb</stEvt:instanceID>\x0a <stEvt:when>2015-09-05T14:53:23-07:00</stEvt:when>\x0a <stEvt:softwareAgent>Adobe Photoshop CC 2015 (Windows)</stEvt:softwareAgent>\x0a </rdf:li>\x0a <rdf:li rdf:parseType=\x22Resource\x22>\x0a <stEvt:action>saved</stEvt:action>\x0a <stEvt:instanceID>xmp.iid:32b73671-fcfd-484c-931d-39a3014fc796</stEvt:instanceID>\x0a <stEvt:when>2015-09-05T14:53:23-07:00</stEvt:when>\x0a <stEvt:softwareAgent>Adobe Photoshop CC 2015 (Windows)</stEvt:softwareAgent>\x0a <stEvt:changed>/</stEvt:changed>\x0a </rdf:li>\x0a </rdf:Seq>\x0a </xmpMM:History>\x0a <dc:format>image/png</dc:format>\x0a <photoshop:ColorMode>3</photoshop:ColorMode>\x0a <photoshop:ICCProfile>sRGB IEC61966-2.1</photoshop:ICCProfile>\x0a <tiff:Orientation>1</tiff:Orientation>\x0a <tiff:XResolution>720000/10000</tiff:XResolution>\x0a <tiff:YResolution>720000/10000</tiff:YResolution>\x0a <tiff:ResolutionUnit>2</tiff:ResolutionUnit>\x0a <exif:ColorSpace>1</exif:ColorSpace>\x0a <exif:PixelXDimension>56</exif:PixelXDimension>\x0a <exif:PixelYDimension>56</exif:PixelYDimension>\x0a </rdf:Description>\x0a </rdf:RDF>\x0a</x:xmpmeta>\x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a 
\x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a<?xpacket end=\x22w\x22?>Q(\x12\x8c\x00\x00\x00 cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x06\xfdIDATx\xda\xe4\x9a_LSW\x1c\xc7\x7f\x17\x11\xc1\x04\x88)\x06nSoz\xee\xbd\xe7\xd9G\xc3K\x13\x89>\x10\x13#\x89 \xf4\x0fma&S\x5c\xe2\xc3\x1e!\xca\xdb\xc4\xf8d\xa61F\xa3)-P\x1a\xa0\xcc9\x17]\xdc\x1e\xdc\x93\x1b\x89\x8b>`\x0a\x02\xca\xbf 8\xd9d\xfa\xddCO\xeb\xa5*\xb4P\x9ckO\xf2kB{{9\x9f\xfe\xfe|\xcf\xf9\xddC\x00(\x93-\xfa\x92\xde!\x11Q\x0e\x11\xe5\x12Q\x1e\x11m#\xa2|\x22*H\xb0|\xf1Y\x9e\xb86G|7m#\x9d\x80\x12\x11m\x11\x93- \xa2B\x22\xdaAD;\x89\xa8\x8c\x88\xccDd!\xa2]\xc2,\xe2\xbd2q\xcd\x0e\xf1\x9d\x02q\x8f-\xe9\x80M\x07\xa0$~\xfd|\x22*\x22\xa2\x12I\x92,\xb2,W)\x8ar\x8a1\x16PU\xf5\x9e\xae\xebC\xba\xae\xcfs\xce\x979\xe7\xcb\xba\xae\xcf\xeb\xba>\xa4\xaa\xea=\xc6X@Q\x94S\xb2,WI\x92d!\xa2\x12q\xaf|qo\x89sN\x1f\xb3\xcd\x024\x82\x15\x13Q\x99\xd9l\xae\xb6Z\xad\x174M\x1b\xd8m\xabDyu\x13*\x8e\xb7\xa1\xb2\xd9\x8fCgo\xe3\xf0\xc5\xfb\xa8\xbd\xfa\x08\xb5W\x1f\xe1\xf0\xc5\xfb8t\xf66*\x9b\xfd\xa88\xde\x86\xf2\xea&\xec\xb6UB\xd3\xb4\x01\xab\xd5z\xc1l6W\x0b\xef\x16\xaf\x05\xba\x19\x809\x22w\x8a\x88\xa8\xccb\xb1\x1ce\x8c\x859\xe7\x8b6o\x0b\x0e\xb4\x86Pwm\x10v_\x04\x8e\xf6a8\xfc#p\x06F\xe1\xec\x18[i\x81Q8\xfc#p\xb4\x0f\xc3\xee\x8b\xa0\xee\xda 
\x0e\xb4\x86`\xf3\xb6\x80s\xbe\xc8\x18\x0b[,\x96\xa3\x02\xb4H\xfc\xcf\x9c\xcd\x04\x8c\xe5\xd9v\x22*\x91e\xb9\x8a1\x16\xe4\x9c/\xed;y\x1e5\x97\x06`\xf7\x0d\xc1\xe1\x1f\x81\xab\xf3)\x5c]\xe3\xa8\x0fN\xc2\xdd=\x09whJ\xd8\xb40\xf1w\xf7$\xea\x83\x93pu\x8d\xc3\xd5\xf9\x14\x0e\xff\x08\xec\xbe!\xd4\x5c\x1a\xc0\xbe\x93\xe7\xc19_b\x8c\x05eY\xae\x12\xa1\xbb=\x96\x9f\xe9\x06\x94\x88h+\x11\x15J\x92dV\x14\xa5U\xd3\xb4HE\xd39\x1c\xb9\xfc\x00\x8e\xf6a8;\xc6\xe0\xea\x1a\x17@\xd3\xf0\xf4\xcc\xc0\xd3\xfb\x1c\xde\xdeYx\xfb\xe6>l\xbd\xb3\xf0\xf4>\x87\xa7g&\x0a\xde\x1d\x85uv\x8c\xc1\xd1>\x8c#\x97\x1f\xa0\xa2\xe9\x1c4M\x8b(\x8a\xd2*I\x92Y\x14\xa3\xad1\xc8t\x00J\xa2\xb2\x15\x9bL\xa6=\x8c1_y\xcd\x09\x1c<s\x0bv_\x04\xce\xc0(\xea\x83\x13p\x87\xa6\xe0\xe9\x99\x81\xb7w\x16\x0d\xe1y4\x84_\xa0\xa1\x7f\x01\x8d\xdf-\xbe\xb3\x1b/\xa3fx\xaf\xa1\x7f!zmx>\x0a\xdc3\x03wh\x0a\xf5\xc1\x098\x03\xa3\xb0\xfb\x228x\xe6\x16\xcakN\x801\xe63\x99L{Dn\xe6\xadUi\x93\x01\x8c\xc3\x95\x96\x96\xeeWU\xf5\xa6\xad\xf14j\xaf<\x84\xc3\xff\x04\xae\xcegpwOF\xc1\xfa\xe6VB\xddx\x89/n\xfc\xf9\xce\xbe\x7f\xb5\xd2\x0c\x9f\xc5\xa0c\xb0\xde\xbe\xb9(h\xf7$\x5c\x9d\xcf\xe0\xf0?A\xed\x95\x87\xb05\x9e\x86\xaa\xaa7KKK\xf7'\x03\xb9\x16`,,cpw\xf6\x1ekC\xdd\xf5\xc7\x06\xafM\xc7=\xf6\x1eT\x22\xd0Z\x96\x08+<\xea\x0eM\xc7\xbdYw\xfd1\xf6\x1ek\x83\xaa\xaaw\x0c\x90[?\x06\xb9\x16\xe0\x16\x22*4\x99L{TU\xbd\xb9\x12\xce\xe0\xb5\xfe\x85\x8d\x81\xad\x02\xda\xd0\xbf\x10\xf7f}p2\x11\xf2\xa6\x08\xd7B1\xd7\x94\x00s\x88h\xbb$If\xc6\x98\xcf\xd6x:\x0e\xe7\xee\x9e\x84\xa7\xf7\xf9\xfb^\xdb(\xd8\x07@\x8d\xde\xf4\xf4>\x87\xbb\xfb\x1d\xa4\xad\xf14\x18c>Qx\xb6\x8b9'\x05(\x09\xcd)Q\x14\xa5\xb5\xbc\xe6\x04j\xaf<|\xe7\xb9O\x01\xb7\x0ad\xcc\x93\xb5W\x1e\xa2\xbc\xe6\x04\x14Ei\x15\x12\xb2-1T?\x06\x98KDE\xb2,Wi\x9a\x169x\xe6\x16\x1c\xfe'\xa8\x0fN\xc0\xd33\x93\x14\xdc\xef\x13\xff\xe0\xeb\x9f\x966\x07\xb2g\x06\xf5\xc1\x098\xfcOp\xf0\xcc-h\x9a\x16\x11:Y$\xe6\xbe*\xa0$\x96Fe\x8c\xb1`E\xd39\xd8}\x91h\xb5\x0cM\xc3\xdb7\x97\x94\xe7\x00\xe0\xd5\xf2[\xf8\xfex\x8d\xa3i\x86\xf4\xf6\
xcd\xc1\x1d\x9a\x86\xab\xf3\x19\xec\xbe\x08*\x9a\xce\x811\x16\x14+\x9e|\xa3\x17?\x04\x98KD\xc5\x16\x8b\xe5(\xe7|\xe9\xc8\xe5\x07\xf1\xbc\xf3\xf6\xce\xae,(\xabL\xc88\x1e\xcf\xbeA\xcb/Ki\x83l\xe8_\x88VW\x91\x8fG.?\x00\xe7|I,\xeb\x8a\x8d^L\x044z/\xbc\xef\xe4y8\xda\x87W\x86f\x929\x978\x96\xdf\x00\xfd\x83\xcb\xf8\xf2\x874A\x1aC\xb5}\x18\xfbN\x9e\x07c,\x9c\xe8\xc5D\xc0-DTd6\x9b\xab9\xe7\x8b5\x97\x06\xe0\xec\x18\x83;4\x15\x0f\xcdd\x0b\xca\xc7\xc6\xf8\xe2[|\xf3\xeb_\x1b\x87\x8c\x87\xea\x14\x9c\x1dc\xa8\xb94\x00\xce\xf9\xa2\xd8\x85\x14\xc5d#\x110\x8f\x88J\xacV\xeb\x05\x9b\xb7\x05v\xdf\x10\x5c]\xe3\xc2{/R\xaa\x98\xab\x8d\xb7\x00~\x1eY\xc6W?n\xd4\x8b/\xe0\xe9\x99\x81\xabk\x1cv\xdf\x10l\xde\x16X\xad\xd6\x0b\xa2\xa2\xe6%\x02JDT I\x92E\xd3\xb4\x81\x03\xad!8\xfc#+r/\x159Hf\xcc-\xbd\xc5\xb7\xbf\xfd\xbdnHc.:\xfc#8\xd0\x1a\x82\xa6i\x03b\xd3\x5c@D\x92\x110\x87\x88\x0aeY\xae\xdam\xabD\xdd\xb5A\xb8:\x9f\xc2\x1d\x9a\x8e\xcbB\xba\x01cc]\x92\x22\xc2\xb4!</*\xeaS\xd4]\x1b\xc4n[%\x84d\x14\x12Q\x8e\x110\x97\x88v(\x8ar\xaa\xbc\xba)*\x0d\x89\xe1\x99\xc2\x04R\x1d\xeb\x91\x94\xf7\xc34\x82\xf2\xea&(\x8arJ\xf4xr\x8d\x80yD\xb4\x931\x16\xa88\xde&\xaa\xa7X\xb5\xa4\x18\x9e\xeb\x01\x5c\x97\xa4\x880\x8d\xadn\x1c\xed\xc3\xa88\xde\x06\xc6X@4\xb2\xf2\x8c\x80\xdb\x88\xa8LU\xd5{\x95\xcd\xfe\x15\xf9\x97jxn\x040%I\x89USC\x1eV6\xfb\xa1\xaa\xea=!\x17\xdb\x8c\x80\xf9Dd\xd6u}\xe8\xd0\xd9\xdbQq_\x87<\xa4\x030iII\x94\x8b\xc0(\x0e\x9d\xbd\x0d]\xd7\x87DK2\xdf\x08X@D\x16]\xd7\xe7\x0f_\xbc\xff\xbe\xfe\xa5X\xe5\xd25\xd6\x92\x94D=<|\xf1>t]\x9f\x17}\xd7\x82D\xc0]\x9c\xf3\xe5\xda\xab\x8f\x04\xa0X{\xa6X`\xd2\x09\xb8\x96\xa44\xdex\x19_\x9b:;\xc6P{\xf5\x118\xe7\xcb\xa2\xb9\x9c}\x80\x9fe\x88\xdeMS\x88f|\x91\xf9\xacd\x22<\xf8:\xed2\xf1\xd9\x08}\xf3&\x09}\xc6/\xd52~\xb1\x9d\xf1\xdb\xa5O\xba\xe1\xbd\xfb\x1flx\xb3\xa2e\xb1\xa9M\xa7\xa4J\xff&7\x9d6\xadm\xd8\xfc\x99\xb4\x0d\xb3\xa2\xf1\x9b\xf1\xad\xfb\xacx\xf8\x92\xf1\x8f\xcf\xb2\xe2\x01hV<\xc2\xce\xf8C\x08Yq\x8c$+\x0e\x02e\xc5Q\xae\xac8\x8c\x975\xc7)\xb3\xe2@l\xd6\x1ci\xfe\x7f\x1cJ\x
cfd\xfbw\x00\xdc\xae\x1c\xd1\x0b&\xd9\x15\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x00\xb0\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x18\x00\x00\x00\x18\x08\x04\x00\x00\x00J~\xf5s\x00\x00\x00wIDATx\x01\xcd\xcb\xcd\x09\xc5 \x10\xc4\xf1\x85\xf4\x17LC\xc1b\xf2\xd1\x8fU\xbc\xb5\x89\x89\xe1\x1d\x16ap\xf5 d\xe6\xfa\xff\xc9g\x87\x0d\x8a\xd62\x02\xe4\xbd\xe0\x7f\x857\xadA\xc7\xfaA\x1a\x03\x07\x16D\x17T\xb90\xe0\xe7\x14\xa4FN\xc0^\x82\x9b\xe7\x1cDH\x89.\x9a\x13`\xe4\xa49\x01FXN\x80\x11\x96\x13`\x84\xe5\x0c\xd8&\x80\x0co\xbf\x1a\x04\xa8\x93\xaf\x06\x86>\x1f<\x904\x0e\xf0B/\xd2&\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x00\xb6\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x000\x00\x00\x000\x08\x04\x00\x00\x00\xfd\x0b1\x0c\x00\x00\x00}IDATX\x85\xed\x8f\xcb\x09\x800\x14\x04m\xcbOc\x82\x07\xb5d-`<\x18D\x12}B|A\x90\x9d\xf32\xc3V\x95\x10B\x08\xf15\x0c\xd4\x8f\x9b\x8e>W?\x03\x8b\x9d\xa0c\x05\xc6\x1c\xfd\xc4\x8e\x91\x08z\xb2\x12\x8c`'h\x0f}\xe6\x073\xf1Zo'hY^\xeb\xef\x13n\xfa\xeb\x04\x8d\xa3>M\xb8\xeb\x93\x84\xbf>J\x94\xd0_$\xbc\xf5Q\xa2\x84\xfe\x94(\xa5\x0f\x89\x92z!\x84\x10\xbfe\x03~o\x0d;\x22\x0f\xdb;\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x00\x81\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x18\x00\x00\x00\x18\x08\x04\x00\x00\x00J~\xf5s\x00\x00\x00HIDAT8\xcbc`\x18\x05\x83\x10\xfc\xd7%,\x82,\xd9\xf8\xff\xcf\xff\x08\x14\x91h\xa0H\x03n\x0d\x0d\xff\xff\x03\x15D\xa2(\xff\x8fG\x03\xaa\x16\x22\x94#i\x89\x22R9\x92\x16b\x95\xc3\xb5\x10\xaf\x1c\xaa\xa5a4\x91\x0df\x00\x006gI\xce\x19\x83a\xf5\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01\xaf\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x000\x00\x00\x000\x08\x04\x00\x00\x00\xfd\x0b1\x0c\x00\x00\x01vIDATx\x01\xed\xd7\xbf\xea\xdaP\x14\xc0\xf1\x18\xe3`7\x15\x87V\xdb\xa7\xa8\xad}\x0fA\xa1}\x0f\xff\xec?\xd1\xd5b\x15}\x84P\x07A|\x8f\x90\x17Pti\xd4\xd9d\xf9v\xe9v\xbd^ss\x10\x0a~\xcf*| \x06N\x8e\xf3?\xf4\x0a\x97&\x03V\x84\x9cIH8\x13\xf2\x9b>_qqLc\xfa\xc1'\xc6\x1c\xd1u`D\xdd\x1e\xa8\xb2 
\xc1T\xcc\x9c\x8a\x0d\xf0\x9d\x0b\x8fv\xa2\x93\x0e(\xb0$ms\xbcG\x81wl\xb1iC\xf1\x11\xa0\xc0\x16\xdb6xf`I\x96f&\xe0\x07Yk\xdf\x03\xaa\x5c\xc8ZDY\x0f,\xb8\x9f\xf3o\xee7\xd5\x01\x1fID\x80+\xb5\xdb\xc0\x18D\x00\x18\xde\x02\x5c\x8eb\xc0\x9e\x9c\x0a|\x031\x00\xbe\xa8\xc0@\x14\xe8\xa9\xc0J\x14\xf0U \x14\x05\x02\x158\x8b\x02\x91\x0a$\xa2@\xfc\x04\xe0\xe9\x8f(D\xb2\xc0\xf0\x9af\xceW\x81>\x92uU\xa0\x89d\x0d\x15p9\x88\xfd\xc9;r*\xe00\x12\x03\xden\xef\x83:\xb1\xd0\xc2y\xaf[\x993\x11`\xa2\xdf\xc9\x15Nd\xed\x0f%=\xe0\xd0!k-\xd3\x87\xd7\x8c,\xfd\xc41\x01\x1e\x1bl[\x937\x03\x0eEKb\xad\xfd\xf8U\xc6\xe3\x97\xc5\xc3\xc9\xa7;@\xdaD)\xde\x9c\x96\xcd\x09UfJ\x8c\xa9+\x13J\xf6G`\x8d!{t\xedx\xe3\x83\xf9\x084\x8dK\x83\x1e>\x01\x1111\x11\x01>]>\x93\xb38c\xa5\xe7\x05\x18\xe7/\xb8\xf0\xc9\xd2\xf3\xf1\x97S\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00H+\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x008\x00\x00\x008\x08\x06\x00\x00\x00\xa8\x86;\x1e\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x0aOiCCPPhotoshop ICC profile\x00\x00x\xda\x9dSgTS\xe9\x16=\xf7\xde\xf4BK\x88\x80\x94KoR\x15\x08 RB\x8b\x80\x14\x91&*!\x09\x10J\x88!\xa1\xd9\x15Q\xc1\x11EE\x04\x1b\xc8\xa0\x88\x03\x8e\x8e\x80\x8c\x15Q,\x0c\x8a\x0a\xd8\x07\xe4!\xa2\x8e\x83\xa3\x88\x8a\xca\xfb\xe1{\xa3k\xd6\xbc\xf7\xe6\xcd\xfe\xb5\xd7>\xe7\xac\xf3\x9d\xb3\xcf\x07\xc0\x08\x0c\x96H3Q5\x80\x0c\xa9B\x1e\x11\xe0\x83\xc7\xc4\xc6\xe1\xe4.@\x81\x0a$p\x00\x10\x08\xb3d!s\xfd#\x01\x00\xf8~<<+\x22\xc0\x07\xbe\x00\x01x\xd3\x0b\x08\x00\xc0M\x9b\xc00\x1c\x87\xff\x0f\xeaB\x99\x5c\x01\x80\x84\x01\xc0t\x918K\x08\x80\x14\x00@z\x8eB\xa6\x00@F\x01\x80\x9d\x98&S\x00\xa0\x04\x00`\xcbcb\xe3\x00P-\x00`'\x7f\xe6\xd3\x00\x80\x9d\xf8\x99{\x01\x00[\x94!\x15\x01\xa0\x91\x00 
\x13e\x88D\x00h;\x00\xac\xcfV\x8aE\x00X0\x00\x14fK\xc49\x00\xd8-\x000IWfH\x00\xb0\xb7\x00\xc0\xce\x10\x0b\xb2\x00\x08\x0c\x000Q\x88\x85)\x00\x04{\x00`\xc8##x\x00\x84\x99\x00\x14F\xf2W<\xf1+\xae\x10\xe7*\x00\x00x\x99\xb2<\xb9$9E\x81[\x08-q\x07WW.\x1e(\xceI\x17+\x146a\x02a\x9a@.\xc2y\x99\x192\x814\x0f\xe0\xf3\xcc\x00\x00\xa0\x91\x15\x11\xe0\x83\xf3\xfdx\xce\x0e\xae\xce\xce6\x8e\xb6\x0e_-\xea\xbf\x06\xff\x22bb\xe3\xfe\xe5\xcf\xabp@\x00\x00\xe1t~\xd1\xfe,/\xb3\x1a\x80;\x06\x80m\xfe\xa2%\xee\x04h^\x0b\xa0u\xf7\x8bf\xb2\x0f@\xb5\x00\xa0\xe9\xdaW\xf3p\xf8~<<E\xa1\x90\xb9\xd9\xd9\xe5\xe4\xe4\xd8J\xc4B[a\xcaW}\xfeg\xc2_\xc0W\xfdl\xf9~<\xfc\xf7\xf5\xe0\xbe\xe2$\x812]\x81G\x04\xf8\xe0\xc2\xcc\xf4L\xa5\x1c\xcf\x92\x09\x84b\xdc\xe6\x8fG\xfc\xb7\x0b\xff\xfc\x1d\xd3\x22\xc4Ib\xb9X*\x14\xe3Q\x12q\x8eD\x9a\x8c\xf32\xa5\x22\x89B\x92)\xc5%\xd2\xffd\xe2\xdf,\xfb\x03>\xdf5\x00\xb0j>\x01{\x91-\xa8]c\x03\xf6K'\x10Xt\xc0\xe2\xf7\x00\x00\xf2\xbbo\xc1\xd4(\x08\x03\x80h\x83\xe1\xcfw\xff\xef?\xfdG\xa0%\x00\x80fI\x92q\x00\x00^D$.T\xca\xb3?\xc7\x08\x00\x00D\xa0\x81*\xb0A\x1b\xf4\xc1\x18,\xc0\x06\x1c\xc1\x05\xdc\xc1\x0b\xfc`6\x84B$\xc4\xc2B\x10B\x0ad\x80\x1cr`)\xac\x82B(\x86\xcd\xb0\x1d*`/\xd4@\x1d4\xc0Qh\x86\x93p\x0e.\xc2U\xb8\x0e=p\x0f\xfaa\x08\x9e\xc1(\xbc\x81\x09\x04A\xc8\x08\x13a!\xda\x88\x01b\x8aX#\x8e\x08\x17\x99\x85\xf8!\xc1H\x04\x12\x8b$ \xc9\x88\x14Q\x22K\x915H1R\x8aT UH\x1d\xf2=r\x029\x87\x5cF\xba\x91;\xc8\x002\x82\xfc\x86\xbcG1\x94\x81\xb2Q=\xd4\x0c\xb5C\xb9\xa87\x1a\x84F\xa2\x0b\xd0dt1\x9a\x8f\x16\xa0\x9b\xd0r\xb4\x1a=\x8c6\xa1\xe7\xd0\xabh\x0f\xda\x8f>C\xc70\xc0\xe8\x18\x073\xc4l0.\xc6\xc3B\xb18,\x09\x93c\xcb\xb1\x22\xac\x0c\xab\xc6\x1a\xb0V\xac\x03\xbb\x89\xf5c\xcf\xb1w\x04\x12\x81E\xc0\x096\x04wB a\x1eAHXLXN\xd8H\xa8 \x1c$4\x11\xda\x097\x09\x03\x84Q\xc2'\x22\x93\xa8K\xb4&\xba\x11\xf9\xc4\x18b21\x87XH,#\xd6\x12\x8f\x13/\x10{\x88C\xc47$\x12\x89C2'\xb9\x90\x02I\xb1\xa4T\xd2\x12\xd2F\xd2nR#\xe9,\xa9\x9b4H\x1a#\x93\xc9\xdadk\xb2\x079\x94, 
+\xc8\x85\xe4\x9d\xe4\xc3\xe43\xe4\x1b\xe4!\xf2[\x0a\x9db@q\xa4\xf8S\xe2(R\xcajJ\x19\xe5\x10\xe54\xe5\x06e\x982AU\xa3\x9aR\xdd\xa8\xa1T\x115\x8fZB\xad\xa1\xb6R\xafQ\x87\xa8\x134u\x9a9\xcd\x83\x16IK\xa5\xad\xa2\x95\xd3\x1ah\x17h\xf7i\xaf\xe8t\xba\x11\xdd\x95\x1eN\x97\xd0W\xd2\xcb\xe9G\xe8\x97\xe8\x03\xf4w\x0c\x0d\x86\x15\x83\xc7\x88g(\x19\x9b\x18\x07\x18g\x19w\x18\xaf\x98L\xa6\x19\xd3\x8b\x19\xc7T071\xeb\x98\xe7\x99\x0f\x99oUX*\xb6*|\x15\x91\xca\x0a\x95J\x95&\x95\x1b*/T\xa9\xaa\xa6\xaa\xde\xaa\x0bU\xf3U\xcbT\x8f\xa9^S}\xaeFU3S\xe3\xa9\x09\xd4\x96\xabU\xaa\x9dP\xebS\x1bSg\xa9;\xa8\x87\xaag\xa8oT?\xa4~Y\xfd\x89\x06Y\xc3L\xc3OC\xa4Q\xa0\xb1_\xe3\xbc\xc6 \x0bc\x19\xb3x,!k\x0d\xab\x86u\x815\xc4&\xb1\xcd\xd9|v*\xbb\x98\xfd\x1d\xbb\x8b=\xaa\xa9\xa19C3J3W\xb3R\xf3\x94f?\x07\xe3\x98q\xf8\x9ctN\x09\xe7(\xa7\x97\xf3~\x8a\xde\x14\xef)\xe2)\x1b\xa64L\xb91e\x5ck\xaa\x96\x97\x96X\xabH\xabQ\xabG\xeb\xbd6\xae\xed\xa7\x9d\xa6\xbdE\xbbY\xfb\x81\x0eA\xc7J'\x5c'Gg\x8f\xce\x05\x9d\xe7S\xd9S\xdd\xa7\x0a\xa7\x16M=:\xf5\xae.\xaak\xa5\x1b\xa1\xbbDw\xbfn\xa7\xee\x98\x9e\xbe^\x80\x9eLo\xa7\xdey\xbd\xe7\xfa\x1c}/\xfdT\xfdm\xfa\xa7\xf5G\x0cX\x06\xb3\x0c$\x06\xdb\x0c\xce\x18<\xc55qo<\x1d/\xc7\xdb\xf1QC]\xc3@C\xa5a\x95a\x97\xe1\x84\x91\xb9\xd1<\xa3\xd5F\x8dF\x0f\x8ci\xc6\x5c\xe3$\xe3m\xc6m\xc6\xa3&\x06&!&KM\xeaM\xee\x9aRM\xb9\xa6)\xa6;L;L\xc7\xcd\xcc\xcd\xa2\xcd\xd6\x995\x9b=1\xd72\xe7\x9b\xe7\x9b\xd7\x9b\xdf\xb7`ZxZ,\xb6\xa8\xb6\xb8eI\xb2\xe4Z\xa6Y\xee\xb6\xbcn\x85Z9Y\xa5XUZ]\xb3F\xad\x9d\xad%\xd6\xbb\xad\xbb\xa7\x11\xa7\xb9N\x93N\xab\x9e\xd6g\xc3\xb0\xf1\xb6\xc9\xb6\xa9\xb7\x19\xb0\xe5\xd8\x06\xdb\xae\xb6m\xb6}agb\x17g\xb7\xc5\xae\xc3\xee\x93\xbd\x93}\xba}\x8d\xfd=\x07\x0d\x87\xd9\x0e\xab\x1dZ\x1d~s\xb4r\x14:V:\xde\x9a\xce\x9c\xee?}\xc5\xf4\x96\xe9/gX\xcf\x10\xcf\xd83\xe3\xb6\x13\xcb)\xc4i\x9dS\x9b\xd3Gg\x17g\xb9s\x83\xf3\x88\x8b\x89K\x82\xcb.\x97>.\x9b\x1b\xc6\xdd\xc8\xbd\xe4Jt\xf5q]\xe1z\xd2\xf5\x9d\x9b\xb3\x9b\xc2\xed\xa8\xdb\xaf\xee6\xeei\xee\x87\xdc\x9f\xcc4\x9f)\x9eY3s\xd0\xc3\xc8C\xe0Q\xe5
\xd1?\x0b\x9f\x950k\xdf\xac~OCO\x81g\xb5\xe7#/c/\x91W\xad\xd7\xb0\xb7\xa5w\xaa\xf7a\xef\x17>\xf6>r\x9f\xe3>\xe3<7\xde2\xdeY_\xcc7\xc0\xb7\xc8\xb7\xcbO\xc3o\x9e_\x85\xdfC\x7f#\xffd\xffz\xff\xd1\x00\xa7\x80%\x01g\x03\x89\x81A\x81[\x02\xfb\xf8z|!\xbf\x8e?:\xdbe\xf6\xb2\xd9\xedA\x8c\xa0\xb9A\x15A\x8f\x82\xad\x82\xe5\xc1\xad!h\xc8\xec\x90\xad!\xf7\xe7\x98\xce\x91\xcei\x0e\x85P~\xe8\xd6\xd0\x07a\xe6a\x8b\xc3~\x0c'\x85\x87\x85W\x86?\x8ep\x88X\x1a\xd11\x975w\xd1\xdcCs\xdfD\xfaD\x96D\xde\x9bg1O9\xaf-J5*>\xaa.j<\xda7\xba4\xba?\xc6.fY\xcc\xd5X\x9dXIlK\x1c9.*\xae6nl\xbe\xdf\xfc\xed\xf3\x87\xe2\x9d\xe2\x0b\xe3{\x17\x98/\xc8]py\xa1\xce\xc2\xf4\x85\xa7\x16\xa9.\x12,:\x96@L\x88N8\x94\xf0A\x10*\xa8\x16\x8c%\xf2\x13w%\x8e\x0ay\xc2\x1d\xc2g\x22/\xd16\xd1\x88\xd8C\x5c*\x1eN\xf2H*Mz\x92\xec\x91\xbc5y$\xc53\xa5,\xe5\xb9\x84'\xa9\x90\xbcL\x0dL\xdd\x9b:\x9e\x16\x9av m2=:\xbd1\x83\x92\x91\x90qB\xaa!M\x93\xb6g\xeag\xe6fv\xcb\xace\x85\xb2\xfe\xc5n\x8b\xb7/\x1e\x95\x07\xc9k\xb3\x90\xac\x05Y-\x0a\xb6B\xa6\xe8TZ(\xd7*\x07\xb2geWf\xbf\xcd\x89\xca9\x96\xab\x9e+\xcd\xed\xcc\xb3\xca\xdb\x907\x9c\xef\x9f\xff\xed\x12\xc2\x12\xe1\x92\xb6\xa5\x86KW-\x1dX\xe6\xbd\xacj9\xb2<qy\xdb\x0a\xe3\x15\x05+\x86V\x06\xac<\xb8\x8a\xb6*m\xd5O\xab\xedW\x97\xae~\xbd&zMk\x81^\xc1\xca\x82\xc1\xb5\x01k\xeb\x0bU\x0a\xe5\x85}\xeb\xdc\xd7\xed]OX/Y\xdf\xb5a\xfa\x86\x9d\x1b>\x15\x89\x8a\xae\x14\xdb\x17\x97\x15\x7f\xd8(\xdcx\xe5\x1b\x87o\xca\xbf\x99\xdc\x94\xb4\xa9\xab\xc4\xb9d\xcff\xd2f\xe9\xe6\xde-\x9e[\x0e\x96\xaa\x97\xe6\x97\x0en\x0d\xd9\xda\xb4\x0d\xdfV\xb4\xed\xf5\xf6E\xdb/\x97\xcd(\xdb\xbb\x83\xb6C\xb9\xa3\xbf<\xb8\xbce\xa7\xc9\xce\xcd;?T\xa4T\xf4T\xfaT6\xee\xd2\xdd\xb5a\xd7\xf8n\xd1\xee\x1b{\xbc\xf64\xec\xd5\xdb[\xbc\xf7\xfd>\xc9\xbe\xdbU\x01UM\xd5f\xd5e\xfbI\xfb\xb3\xf7?\xae\x89\xaa\xe9\xf8\x96\xfbm]\xadNmq\xed\xc7\x03\xd2\x03\xfd\x07#\x0e\xb6\xd7\xb9\xd4\xd5\x1d\xd2=TR\x8f\xd6+\xebG\x0e\xc7\x1f\xbe\xfe\x9d\xefw-\x0d6\x0dU\x8d\x9c\xc6\xe2#pDy\xe4\xe9\xf7\x09\xdf\xf7\x1e\x0d:\xdav\x8c{\xac\xe1\x07\xd3\x1fv\x1dg\x1d/jB\x9
a\xf2\x9aF\x9bS\x9a\xfb[b[\xbaO\xcc>\xd1\xd6\xea\xdez\xfcG\xdb\x1f\x0f\x9c4<YyJ\xf3T\xc9i\xda\xe9\x82\xd3\x93g\xf2\xcf\x8c\x9d\x95\x9d}~.\xf9\xdc`\xdb\xa2\xb6{\xe7c\xce\xdfj\x0fo\xef\xba\x10t\xe1\xd2E\xff\x8b\xe7;\xbc;\xce\x5c\xf2\xb8t\xf2\xb2\xdb\xe5\x13W\xb8W\x9a\xaf:_m\xeat\xea<\xfe\x93\xd3O\xc7\xbb\x9c\xbb\x9a\xae\xb9\x5ck\xb9\xeez\xbd\xb5{f\xf7\xe9\x1b\x9e7\xce\xdd\xf4\xbdy\xf1\x16\xff\xd6\xd5\x9e9=\xdd\xbd\xf3zo\xf7\xc5\xf7\xf5\xdf\x16\xdd~r'\xfd\xce\xcb\xbb\xd9w'\xee\xad\xbcO\xbc_\xf4@\xedA\xd9C\xdd\x87\xd5?[\xfe\xdc\xd8\xef\xdc\x7fj\xc0w\xa0\xf3\xd1\xdcG\xf7\x06\x85\x83\xcf\xfe\x91\xf5\x8f\x0fC\x05\x8f\x99\x8f\xcb\x86\x0d\x86\xeb\x9e8>99\xe2?r\xfd\xe9\xfc\xa7C\xcfd\xcf&\x9e\x17\xfe\xa2\xfe\xcb\xae\x17\x16/~\xf8\xd5\xeb\xd7\xce\xd1\x98\xd1\xa1\x97\xf2\x97\x93\xbfm|\xa5\xfd\xea\xc0\xeb\x19\xaf\xdb\xc6\xc2\xc6\x1e\xbe\xc9x31^\xf4V\xfb\xed\xc1w\xdcw\x1d\xef\xa3\xdf\x0fO\xe4| \x7f(\xffh\xf9\xb1\xf5S\xd0\xa7\xfb\x93\x19\x93\x93\xff\x04\x03\x98\xf3\xfcc3-\xdb\x00\x00:0iTXtXML:com.adobe.xmp\x00\x00\x00\x00\x00<?xpacket begin=\x22\xef\xbb\xbf\x22 id=\x22W5M0MpCehiHzreSzNTczkc9d\x22?>\x0a<x:xmpmeta xmlns:x=\x22adobe:ns:meta/\x22 x:xmptk=\x22Adobe XMP Core 5.6-c067 79.157747, 2015/03/30-23:40:42 \x22>\x0a <rdf:RDF xmlns:rdf=\x22http://www.w3.org/1999/02/22-rdf-syntax-ns#\x22>\x0a <rdf:Description rdf:about=\x22\x22\x0a xmlns:xmp=\x22http://ns.adobe.com/xap/1.0/\x22\x0a xmlns:xmpMM=\x22http://ns.adobe.com/xap/1.0/mm/\x22\x0a xmlns:stEvt=\x22http://ns.adobe.com/xap/1.0/sType/ResourceEvent#\x22\x0a xmlns:dc=\x22http://purl.org/dc/elements/1.1/\x22\x0a xmlns:photoshop=\x22http://ns.adobe.com/photoshop/1.0/\x22\x0a xmlns:tiff=\x22http://ns.adobe.com/tiff/1.0/\x22\x0a xmlns:exif=\x22http://ns.adobe.com/exif/1.0/\x22>\x0a <xmp:CreatorTool>Adobe Photoshop CC 2015 (Windows)</xmp:CreatorTool>\x0a <xmp:CreateDate>2015-09-05T14:52:22-07:00</xmp:CreateDate>\x0a <xmp:MetadataDate>2015-09-05T14:52:22-07:00</xmp:MetadataDate>\x0a 
<xmp:ModifyDate>2015-09-05T14:52:22-07:00</xmp:ModifyDate>\x0a <xmpMM:InstanceID>xmp.iid:6085a3b1-8ffc-b64c-a4bd-b8d8e469f035</xmpMM:InstanceID>\x0a <xmpMM:DocumentID>adobe:docid:photoshop:5d374afd-5418-11e5-864d-c7d3ee06def3</xmpMM:DocumentID>\x0a <xmpMM:OriginalDocumentID>xmp.did:a8717aa6-9cea-6b43-82ea-f7910d80fcd7</xmpMM:OriginalDocumentID>\x0a <xmpMM:History>\x0a <rdf:Seq>\x0a <rdf:li rdf:parseType=\x22Resource\x22>\x0a <stEvt:action>created</stEvt:action>\x0a <stEvt:instanceID>xmp.iid:a8717aa6-9cea-6b43-82ea-f7910d80fcd7</stEvt:instanceID>\x0a <stEvt:when>2015-09-05T14:52:22-07:00</stEvt:when>\x0a <stEvt:softwareAgent>Adobe Photoshop CC 2015 (Windows)</stEvt:softwareAgent>\x0a </rdf:li>\x0a <rdf:li rdf:parseType=\x22Resource\x22>\x0a <stEvt:action>saved</stEvt:action>\x0a <stEvt:instanceID>xmp.iid:6085a3b1-8ffc-b64c-a4bd-b8d8e469f035</stEvt:instanceID>\x0a <stEvt:when>2015-09-05T14:52:22-07:00</stEvt:when>\x0a <stEvt:softwareAgent>Adobe Photoshop CC 2015 (Windows)</stEvt:softwareAgent>\x0a <stEvt:changed>/</stEvt:changed>\x0a </rdf:li>\x0a </rdf:Seq>\x0a </xmpMM:History>\x0a <dc:format>image/png</dc:format>\x0a <photoshop:ColorMode>3</photoshop:ColorMode>\x0a <photoshop:ICCProfile>sRGB IEC61966-2.1</photoshop:ICCProfile>\x0a <tiff:Orientation>1</tiff:Orientation>\x0a <tiff:XResolution>720000/10000</tiff:XResolution>\x0a <tiff:YResolution>720000/10000</tiff:YResolution>\x0a <tiff:ResolutionUnit>2</tiff:ResolutionUnit>\x0a <exif:ColorSpace>1</exif:ColorSpace>\x0a <exif:PixelXDimension>56</exif:PixelXDimension>\x0a <exif:PixelYDimension>56</exif:PixelYDimension>\x0a </rdf:Description>\x0a </rdf:RDF>\x0a</x:xmpmeta>\x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a 
\x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a<?xpacket end=\x22w\x22?>\xd7\xd4\x90\xb6\x00\x00\x00 cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x03\x1aIDATx\xda\xec\x9a\xbfkZQ\x14\xc7\xe3\x1f\x92\xf1;u\xcb\x90\x0c\x013\x84\x04\x14\x17\x85\x0cqy1\xb6X\x04\x07\xa1 \x06K\x1c;H\x84H\xc9\x92\x97\x10\x8a\x04$!D(R\xa4\xa4\x0d\xd2\xad\x1d\xa2Y\xd2\xa9\x8b\x051\xad\xd8\x90\xb4\xda\xf8\xed\xd0+\x94G\xfb\x1a\xdf/\x1f\xef\xe5\xc0YD\xef\xf1\x03\xf7\xde\xef9\xe7\x9e\x09\x92\x13N\xf6\x89{@\xb7\x03\x02P\xba\x07\xc04\x80\x18\x80M\x00\x15\x00\xe7\x00\xda\x00z\xc2\xdb\xe2\xb3\x8a\xf8NL\xfc\xc6\xf3\x97\xf5T\xddJ\xc0Y\x00Y\x00\xa7S\xf3Az\xa54})\x99\xa1\x5c\x95a\xb9A\xa9\xd4d\xb4\xdce\xb4\xdc\xa5Tj2,7\x18\xcaU\xe9K\xc9\xf4JiN\xcd\x07\x09\xe0T\xac1k'\xc0\x00\x80]\x00\x9d\x85\xc4\x06\x97\x0a5>|y\xad\xc9\x97\x0a5.$6\x08\xa0#\xd6\x0c\x8c\x0d\x10\xc0\x0c\x80-\x00W\x81\xf5}\xae\x1c\xb64\x83)}\xe5\xb0\xc5\xc0\xfa>\x01\x5c\x89\x183\x96\x01\x8as\x12\x07P\xf7\xaf\xed1rti\x18\x98\xd2#G\x97\xf4\xaf\xed\x11@]\xc4\xf4\x98\x0a\x08`\x12@~.\x92\xe1\xf2\xf6\x99i`J_\xde>\xe3\x5c$C\x00y\xf1\x1f\x8c\x07\x04\xf0\x00@q1Y\xe0\xeaq\xc72\xb8\xa1\xaf\x1ew\xb8\x98,\x10@Q\xfc\x17\xe3\x00\xc5\x82\x07\xfe\xf4\x8e\xe5`J\xf7\xa7w\x08\xe0`\x08\xa9\x1bPl\x89\xa2\x1d\xe0\x14\x90E\x00\x93\xba\x00\xc5\xa1\xce/&\x0b\xb6\x81\x1b\xba\xd8\xaey\x00\x1e=\x80\xf1\xb9Hf,g\xee.gR\x5c<qM\x80B{\xeaZn\xcb\x0f\x9f\x7f\xf2\xc9\xeb\x1bKnW!!3Z\x00\xb7\xfck{\x9a\x02\x93\xe4u\x7f\xc0\x17\xf5\x1e\x1f\x99}\x1e\x7f\xeb\xe4\xd6H\x80\x22E\xba\xd2*\xe2\x7f\xda\xc7/\xb7|\xfa\xf6\xc6\xd4d@d<\x81Q\x00w\x03\xeb\xfb\x9a\x83*\xad\x7fK\x96/\xfa|\x5c1\x07R\xa4u\xbbw\x02\x14\x99|GOn\xf9/k~\x1b\xf0\xd9\xbb\xef\x86\x03\xae\x1c\xb6\x86\x09\xfa\xec]\x00\xb3\x0b\x89\x0d]\x01\xd5
l@\xf2\xcd\xa7>\x13\xaf\x8c\x85\x14UHV\x15P\xe8\xde\xa9\x9e\x92\xe7\x7f\x80C\xfbz3\xe0\xf3\xf7?\x0c\x03\x5c*\xd4\x86\xf5\xa4G\x0dpzj>\xa8;\xd8(f\xa4\xa4\x88\xa2yZ\x0d0\xe6\x95\xd2\x96\x02\x1a))^)M\x0015\xc0M_J\xb6\x1c\xd0(I\xf1\xa5d\x02\xd8T\x03\xac\x84r\xd5\xb1\x01\xea\x95\x94P\xaeJ\x00\x155\xc0\xf3\xb0\xdc\x18+\xa0\x1eI\x09\xcb\x0d\x028W\x03lK\xa5\xa6-\x00\xb5H\x8aTj\x12@[\x0d\xb0\x17-wm\x038\xaa\xa4D\xcb]\x02\xe8\xb9\x1a\xd0v[\xf4\xc4\xe0-\xea\xf8K\xc6\x162q|\xd13M&\xc6.\xf4\x19\x93\x85\xde\xf1\xa9\x9a\xe3\x93mg\x97KV\x15\xbc'\xe3*x]\xd1\xb20\xab\xe9\xa4\xf5\xea7\xbc\xe9dF\xdb0c\xb7\xb6\xa1\xe3\x1b\xbf\xaeh\xdd;\xfe\xf1\xc5\x15\xcfg\x8e\x7f\x00u\xc5\x13\xb6+\x86\x10\x5c1F\xe2\x8aA W\x8cr\xb9f\x18\xcfM\xe3\x94\xce\x1e\x88\xbd\x9f\xd9\xbe\x074\xd7\x7f\x0d\x00%hU\x82'\xa7?o\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x00Z\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x000\x00\x00\x000\x01\x00\x00\x00\x00\x7fy\xc4*\x00\x00\x00\x02tRNS\x00\x00v\x93\xcd8\x00\x00\x00\x13IDATx\x01c\xa0\x09\xe0\xff\xff\xff\xc3\xe0\xa7h\x01\x00\xf5\x1eG\xb9\xc7\x812&\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01l\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x000\x00\x00\x000\x08\x04\x00\x00\x00\xfd\x0b1\x0c\x00\x00\x013IDATx\x01\xec\xcd1\x0e\xc1`\x18\x87\xf1\xa6I\x87N\x9d\xd8\x18\x99\x99\xb9\xc0w\x01\x0e\xc0\x058\x00\x17\xe0\x00\x5c\x80\x03p\x00ffF6\xa6N\x1d\x9a4\x8f\xa9\x896m\xda~}\xbfA\xe2y\xb77\xff\xe4g\xfd\xfb\xb5P(\xac*Wi\x8c\xc3\x9d\x1b\x8e9`\x0e\xc0\xcc\x14\xd0\xc4\x07\xc0\xa7a\x06\xd8\x12\xb71\x01\xf4\x89\x88\x8b\xe8\xc9\x03g\xbe;I\x03c\xd2\x8d$\x01\x97'\xe9\x1e\xb8r\xc0\x92\xac\x16R@\x9b\x80\xac\x02Z2\xc0\x9e\xbcv\x12\xc0\x90\xfc`P\x17\xb0\xb9\x92\x1f\x5c\xb0\xeb\x01S\x8a\x9a\xd4\x01<\xde\x14\xf5\xc2\xd3\x07\xd6$\x8b\xff\xc9V\xba@\x97\xb0\x14\x10\xd2\xd1\x03\x8eP\x0a\x80\x83\x0e\xa0\xa04\x00\xea\xd3^\x1dZ\x01\x08\xc5@\x10\xec\xbf \xea\x03\x159b_@\xf1/\x05\xac\x88\x18\x07\x88c\x0a\x10R\xe3x\xa7\x00!%\x8e9\x00H\x89c\x0e\x00R\xe3X\x07H\x8dc\x1d 
5\x8eu\x80\xd48\xd6\x01R\xe3X\x9f\x0cH\x85c\x0f\x00R\xe1\xd8\x03\x80T8\xf6\x00 5\x8eu\x80\xd48\xd6\x01R\xe3X\x07H\x81\xe3~\x03)p\xdco \x05\x8e\xaf\xec\x9a\xc0g\xf7\x93\xc0\x09<\xe3\x1a2s\xa8\x81\xcc\xfd\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00G\x9c\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x008\x00\x00\x008\x08\x06\x00\x00\x00\xa8\x86;\x1e\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x0aOiCCPPhotoshop ICC profile\x00\x00x\xda\x9dSgTS\xe9\x16=\xf7\xde\xf4BK\x88\x80\x94KoR\x15\x08 RB\x8b\x80\x14\x91&*!\x09\x10J\x88!\xa1\xd9\x15Q\xc1\x11EE\x04\x1b\xc8\xa0\x88\x03\x8e\x8e\x80\x8c\x15Q,\x0c\x8a\x0a\xd8\x07\xe4!\xa2\x8e\x83\xa3\x88\x8a\xca\xfb\xe1{\xa3k\xd6\xbc\xf7\xe6\xcd\xfe\xb5\xd7>\xe7\xac\xf3\x9d\xb3\xcf\x07\xc0\x08\x0c\x96H3Q5\x80\x0c\xa9B\x1e\x11\xe0\x83\xc7\xc4\xc6\xe1\xe4.@\x81\x0a$p\x00\x10\x08\xb3d!s\xfd#\x01\x00\xf8~<<+\x22\xc0\x07\xbe\x00\x01x\xd3\x0b\x08\x00\xc0M\x9b\xc00\x1c\x87\xff\x0f\xeaB\x99\x5c\x01\x80\x84\x01\xc0t\x918K\x08\x80\x14\x00@z\x8eB\xa6\x00@F\x01\x80\x9d\x98&S\x00\xa0\x04\x00`\xcbcb\xe3\x00P-\x00`'\x7f\xe6\xd3\x00\x80\x9d\xf8\x99{\x01\x00[\x94!\x15\x01\xa0\x91\x00 
\x13e\x88D\x00h;\x00\xac\xcfV\x8aE\x00X0\x00\x14fK\xc49\x00\xd8-\x000IWfH\x00\xb0\xb7\x00\xc0\xce\x10\x0b\xb2\x00\x08\x0c\x000Q\x88\x85)\x00\x04{\x00`\xc8##x\x00\x84\x99\x00\x14F\xf2W<\xf1+\xae\x10\xe7*\x00\x00x\x99\xb2<\xb9$9E\x81[\x08-q\x07WW.\x1e(\xceI\x17+\x146a\x02a\x9a@.\xc2y\x99\x192\x814\x0f\xe0\xf3\xcc\x00\x00\xa0\x91\x15\x11\xe0\x83\xf3\xfdx\xce\x0e\xae\xce\xce6\x8e\xb6\x0e_-\xea\xbf\x06\xff\x22bb\xe3\xfe\xe5\xcf\xabp@\x00\x00\xe1t~\xd1\xfe,/\xb3\x1a\x80;\x06\x80m\xfe\xa2%\xee\x04h^\x0b\xa0u\xf7\x8bf\xb2\x0f@\xb5\x00\xa0\xe9\xdaW\xf3p\xf8~<<E\xa1\x90\xb9\xd9\xd9\xe5\xe4\xe4\xd8J\xc4B[a\xcaW}\xfeg\xc2_\xc0W\xfdl\xf9~<\xfc\xf7\xf5\xe0\xbe\xe2$\x812]\x81G\x04\xf8\xe0\xc2\xcc\xf4L\xa5\x1c\xcf\x92\x09\x84b\xdc\xe6\x8fG\xfc\xb7\x0b\xff\xfc\x1d\xd3\x22\xc4Ib\xb9X*\x14\xe3Q\x12q\x8eD\x9a\x8c\xf32\xa5\x22\x89B\x92)\xc5%\xd2\xffd\xe2\xdf,\xfb\x03>\xdf5\x00\xb0j>\x01{\x91-\xa8]c\x03\xf6K'\x10Xt\xc0\xe2\xf7\x00\x00\xf2\xbbo\xc1\xd4(\x08\x03\x80h\x83\xe1\xcfw\xff\xef?\xfdG\xa0%\x00\x80fI\x92q\x00\x00^D$.T\xca\xb3?\xc7\x08\x00\x00D\xa0\x81*\xb0A\x1b\xf4\xc1\x18,\xc0\x06\x1c\xc1\x05\xdc\xc1\x0b\xfc`6\x84B$\xc4\xc2B\x10B\x0ad\x80\x1cr`)\xac\x82B(\x86\xcd\xb0\x1d*`/\xd4@\x1d4\xc0Qh\x86\x93p\x0e.\xc2U\xb8\x0e=p\x0f\xfaa\x08\x9e\xc1(\xbc\x81\x09\x04A\xc8\x08\x13a!\xda\x88\x01b\x8aX#\x8e\x08\x17\x99\x85\xf8!\xc1H\x04\x12\x8b$ \xc9\x88\x14Q\x22K\x915H1R\x8aT UH\x1d\xf2=r\x029\x87\x5cF\xba\x91;\xc8\x002\x82\xfc\x86\xbcG1\x94\x81\xb2Q=\xd4\x0c\xb5C\xb9\xa87\x1a\x84F\xa2\x0b\xd0dt1\x9a\x8f\x16\xa0\x9b\xd0r\xb4\x1a=\x8c6\xa1\xe7\xd0\xabh\x0f\xda\x8f>C\xc70\xc0\xe8\x18\x073\xc4l0.\xc6\xc3B\xb18,\x09\x93c\xcb\xb1\x22\xac\x0c\xab\xc6\x1a\xb0V\xac\x03\xbb\x89\xf5c\xcf\xb1w\x04\x12\x81E\xc0\x096\x04wB a\x1eAHXLXN\xd8H\xa8 \x1c$4\x11\xda\x097\x09\x03\x84Q\xc2'\x22\x93\xa8K\xb4&\xba\x11\xf9\xc4\x18b21\x87XH,#\xd6\x12\x8f\x13/\x10{\x88C\xc47$\x12\x89C2'\xb9\x90\x02I\xb1\xa4T\xd2\x12\xd2F\xd2nR#\xe9,\xa9\x9b4H\x1a#\x93\xc9\xdadk\xb2\x079\x94, 
+\xc8\x85\xe4\x9d\xe4\xc3\xe43\xe4\x1b\xe4!\xf2[\x0a\x9db@q\xa4\xf8S\xe2(R\xcajJ\x19\xe5\x10\xe54\xe5\x06e\x982AU\xa3\x9aR\xdd\xa8\xa1T\x115\x8fZB\xad\xa1\xb6R\xafQ\x87\xa8\x134u\x9a9\xcd\x83\x16IK\xa5\xad\xa2\x95\xd3\x1ah\x17h\xf7i\xaf\xe8t\xba\x11\xdd\x95\x1eN\x97\xd0W\xd2\xcb\xe9G\xe8\x97\xe8\x03\xf4w\x0c\x0d\x86\x15\x83\xc7\x88g(\x19\x9b\x18\x07\x18g\x19w\x18\xaf\x98L\xa6\x19\xd3\x8b\x19\xc7T071\xeb\x98\xe7\x99\x0f\x99oUX*\xb6*|\x15\x91\xca\x0a\x95J\x95&\x95\x1b*/T\xa9\xaa\xa6\xaa\xde\xaa\x0bU\xf3U\xcbT\x8f\xa9^S}\xaeFU3S\xe3\xa9\x09\xd4\x96\xabU\xaa\x9dP\xebS\x1bSg\xa9;\xa8\x87\xaag\xa8oT?\xa4~Y\xfd\x89\x06Y\xc3L\xc3OC\xa4Q\xa0\xb1_\xe3\xbc\xc6 \x0bc\x19\xb3x,!k\x0d\xab\x86u\x815\xc4&\xb1\xcd\xd9|v*\xbb\x98\xfd\x1d\xbb\x8b=\xaa\xa9\xa19C3J3W\xb3R\xf3\x94f?\x07\xe3\x98q\xf8\x9ctN\x09\xe7(\xa7\x97\xf3~\x8a\xde\x14\xef)\xe2)\x1b\xa64L\xb91e\x5ck\xaa\x96\x97\x96X\xabH\xabQ\xabG\xeb\xbd6\xae\xed\xa7\x9d\xa6\xbdE\xbbY\xfb\x81\x0eA\xc7J'\x5c'Gg\x8f\xce\x05\x9d\xe7S\xd9S\xdd\xa7\x0a\xa7\x16M=:\xf5\xae.\xaak\xa5\x1b\xa1\xbbDw\xbfn\xa7\xee\x98\x9e\xbe^\x80\x9eLo\xa7\xdey\xbd\xe7\xfa\x1c}/\xfdT\xfdm\xfa\xa7\xf5G\x0cX\x06\xb3\x0c$\x06\xdb\x0c\xce\x18<\xc55qo<\x1d/\xc7\xdb\xf1QC]\xc3@C\xa5a\x95a\x97\xe1\x84\x91\xb9\xd1<\xa3\xd5F\x8dF\x0f\x8ci\xc6\x5c\xe3$\xe3m\xc6m\xc6\xa3&\x06&!&KM\xeaM\xee\x9aRM\xb9\xa6)\xa6;L;L\xc7\xcd\xcc\xcd\xa2\xcd\xd6\x995\x9b=1\xd72\xe7\x9b\xe7\x9b\xd7\x9b\xdf\xb7`ZxZ,\xb6\xa8\xb6\xb8eI\xb2\xe4Z\xa6Y\xee\xb6\xbcn\x85Z9Y\xa5XUZ]\xb3F\xad\x9d\xad%\xd6\xbb\xad\xbb\xa7\x11\xa7\xb9N\x93N\xab\x9e\xd6g\xc3\xb0\xf1\xb6\xc9\xb6\xa9\xb7\x19\xb0\xe5\xd8\x06\xdb\xae\xb6m\xb6}agb\x17g\xb7\xc5\xae\xc3\xee\x93\xbd\x93}\xba}\x8d\xfd=\x07\x0d\x87\xd9\x0e\xab\x1dZ\x1d~s\xb4r\x14:V:\xde\x9a\xce\x9c\xee?}\xc5\xf4\x96\xe9/gX\xcf\x10\xcf\xd83\xe3\xb6\x13\xcb)\xc4i\x9dS\x9b\xd3Gg\x17g\xb9s\x83\xf3\x88\x8b\x89K\x82\xcb.\x97>.\x9b\x1b\xc6\xdd\xc8\xbd\xe4Jt\xf5q]\xe1z\xd2\xf5\x9d\x9b\xb3\x9b\xc2\xed\xa8\xdb\xaf\xee6\xeei\xee\x87\xdc\x9f\xcc4\x9f)\x9eY3s\xd0\xc3\xc8C\xe0Q\xe5
\xd1?\x0b\x9f\x950k\xdf\xac~OCO\x81g\xb5\xe7#/c/\x91W\xad\xd7\xb0\xb7\xa5w\xaa\xf7a\xef\x17>\xf6>r\x9f\xe3>\xe3<7\xde2\xdeY_\xcc7\xc0\xb7\xc8\xb7\xcbO\xc3o\x9e_\x85\xdfC\x7f#\xffd\xffz\xff\xd1\x00\xa7\x80%\x01g\x03\x89\x81A\x81[\x02\xfb\xf8z|!\xbf\x8e?:\xdbe\xf6\xb2\xd9\xedA\x8c\xa0\xb9A\x15A\x8f\x82\xad\x82\xe5\xc1\xad!h\xc8\xec\x90\xad!\xf7\xe7\x98\xce\x91\xcei\x0e\x85P~\xe8\xd6\xd0\x07a\xe6a\x8b\xc3~\x0c'\x85\x87\x85W\x86?\x8ep\x88X\x1a\xd11\x975w\xd1\xdcCs\xdfD\xfaD\x96D\xde\x9bg1O9\xaf-J5*>\xaa.j<\xda7\xba4\xba?\xc6.fY\xcc\xd5X\x9dXIlK\x1c9.*\xae6nl\xbe\xdf\xfc\xed\xf3\x87\xe2\x9d\xe2\x0b\xe3{\x17\x98/\xc8]py\xa1\xce\xc2\xf4\x85\xa7\x16\xa9.\x12,:\x96@L\x88N8\x94\xf0A\x10*\xa8\x16\x8c%\xf2\x13w%\x8e\x0ay\xc2\x1d\xc2g\x22/\xd16\xd1\x88\xd8C\x5c*\x1eN\xf2H*Mz\x92\xec\x91\xbc5y$\xc53\xa5,\xe5\xb9\x84'\xa9\x90\xbcL\x0dL\xdd\x9b:\x9e\x16\x9av m2=:\xbd1\x83\x92\x91\x90qB\xaa!M\x93\xb6g\xeag\xe6fv\xcb\xace\x85\xb2\xfe\xc5n\x8b\xb7/\x1e\x95\x07\xc9k\xb3\x90\xac\x05Y-\x0a\xb6B\xa6\xe8TZ(\xd7*\x07\xb2geWf\xbf\xcd\x89\xca9\x96\xab\x9e+\xcd\xed\xcc\xb3\xca\xdb\x907\x9c\xef\x9f\xff\xed\x12\xc2\x12\xe1\x92\xb6\xa5\x86KW-\x1dX\xe6\xbd\xacj9\xb2<qy\xdb\x0a\xe3\x15\x05+\x86V\x06\xac<\xb8\x8a\xb6*m\xd5O\xab\xedW\x97\xae~\xbd&zMk\x81^\xc1\xca\x82\xc1\xb5\x01k\xeb\x0bU\x0a\xe5\x85}\xeb\xdc\xd7\xed]OX/Y\xdf\xb5a\xfa\x86\x9d\x1b>\x15\x89\x8a\xae\x14\xdb\x17\x97\x15\x7f\xd8(\xdcx\xe5\x1b\x87o\xca\xbf\x99\xdc\x94\xb4\xa9\xab\xc4\xb9d\xcff\xd2f\xe9\xe6\xde-\x9e[\x0e\x96\xaa\x97\xe6\x97\x0en\x0d\xd9\xda\xb4\x0d\xdfV\xb4\xed\xf5\xf6E\xdb/\x97\xcd(\xdb\xbb\x83\xb6C\xb9\xa3\xbf<\xb8\xbce\xa7\xc9\xce\xcd;?T\xa4T\xf4T\xfaT6\xee\xd2\xdd\xb5a\xd7\xf8n\xd1\xee\x1b{\xbc\xf64\xec\xd5\xdb[\xbc\xf7\xfd>\xc9\xbe\xdbU\x01UM\xd5f\xd5e\xfbI\xfb\xb3\xf7?\xae\x89\xaa\xe9\xf8\x96\xfbm]\xadNmq\xed\xc7\x03\xd2\x03\xfd\x07#\x0e\xb6\xd7\xb9\xd4\xd5\x1d\xd2=TR\x8f\xd6+\xebG\x0e\xc7\x1f\xbe\xfe\x9d\xefw-\x0d6\x0dU\x8d\x9c\xc6\xe2#pDy\xe4\xe9\xf7\x09\xdf\xf7\x1e\x0d:\xdav\x8c{\xac\xe1\x07\xd3\x1fv\x1dg\x1d/jB\x9
a\xf2\x9aF\x9bS\x9a\xfb[b[\xbaO\xcc>\xd1\xd6\xea\xdez\xfcG\xdb\x1f\x0f\x9c4<YyJ\xf3T\xc9i\xda\xe9\x82\xd3\x93g\xf2\xcf\x8c\x9d\x95\x9d}~.\xf9\xdc`\xdb\xa2\xb6{\xe7c\xce\xdfj\x0fo\xef\xba\x10t\xe1\xd2E\xff\x8b\xe7;\xbc;\xce\x5c\xf2\xb8t\xf2\xb2\xdb\xe5\x13W\xb8W\x9a\xaf:_m\xeat\xea<\xfe\x93\xd3O\xc7\xbb\x9c\xbb\x9a\xae\xb9\x5ck\xb9\xeez\xbd\xb5{f\xf7\xe9\x1b\x9e7\xce\xdd\xf4\xbdy\xf1\x16\xff\xd6\xd5\x9e9=\xdd\xbd\xf3zo\xf7\xc5\xf7\xf5\xdf\x16\xdd~r'\xfd\xce\xcb\xbb\xd9w'\xee\xad\xbcO\xbc_\xf4@\xedA\xd9C\xdd\x87\xd5?[\xfe\xdc\xd8\xef\xdc\x7fj\xc0w\xa0\xf3\xd1\xdcG\xf7\x06\x85\x83\xcf\xfe\x91\xf5\x8f\x0fC\x05\x8f\x99\x8f\xcb\x86\x0d\x86\xeb\x9e8>99\xe2?r\xfd\xe9\xfc\xa7C\xcfd\xcf&\x9e\x17\xfe\xa2\xfe\xcb\xae\x17\x16/~\xf8\xd5\xeb\xd7\xce\xd1\x98\xd1\xa1\x97\xf2\x97\x93\xbfm|\xa5\xfd\xea\xc0\xeb\x19\xaf\xdb\xc6\xc2\xc6\x1e\xbe\xc9x31^\xf4V\xfb\xed\xc1w\xdcw\x1d\xef\xa3\xdf\x0fO\xe4| \x7f(\xffh\xf9\xb1\xf5S\xd0\xa7\xfb\x93\x19\x93\x93\xff\x04\x03\x98\xf3\xfcc3-\xdb\x00\x00:0iTXtXML:com.adobe.xmp\x00\x00\x00\x00\x00<?xpacket begin=\x22\xef\xbb\xbf\x22 id=\x22W5M0MpCehiHzreSzNTczkc9d\x22?>\x0a<x:xmpmeta xmlns:x=\x22adobe:ns:meta/\x22 x:xmptk=\x22Adobe XMP Core 5.6-c067 79.157747, 2015/03/30-23:40:42 \x22>\x0a <rdf:RDF xmlns:rdf=\x22http://www.w3.org/1999/02/22-rdf-syntax-ns#\x22>\x0a <rdf:Description rdf:about=\x22\x22\x0a xmlns:xmp=\x22http://ns.adobe.com/xap/1.0/\x22\x0a xmlns:xmpMM=\x22http://ns.adobe.com/xap/1.0/mm/\x22\x0a xmlns:stEvt=\x22http://ns.adobe.com/xap/1.0/sType/ResourceEvent#\x22\x0a xmlns:dc=\x22http://purl.org/dc/elements/1.1/\x22\x0a xmlns:photoshop=\x22http://ns.adobe.com/photoshop/1.0/\x22\x0a xmlns:tiff=\x22http://ns.adobe.com/tiff/1.0/\x22\x0a xmlns:exif=\x22http://ns.adobe.com/exif/1.0/\x22>\x0a <xmp:CreatorTool>Adobe Photoshop CC 2015 (Windows)</xmp:CreatorTool>\x0a <xmp:CreateDate>2015-09-05T14:55:01-07:00</xmp:CreateDate>\x0a <xmp:MetadataDate>2015-09-05T14:55:01-07:00</xmp:MetadataDate>\x0a 
<xmp:ModifyDate>2015-09-05T14:55:01-07:00</xmp:ModifyDate>\x0a <xmpMM:InstanceID>xmp.iid:1908167c-e866-8b4b-8154-22093cbfb4e8</xmpMM:InstanceID>\x0a <xmpMM:DocumentID>adobe:docid:photoshop:bb1908d8-5418-11e5-864d-c7d3ee06def3</xmpMM:DocumentID>\x0a <xmpMM:OriginalDocumentID>xmp.did:03b365c0-f6bc-9447-842d-c2c5642b15ce</xmpMM:OriginalDocumentID>\x0a <xmpMM:History>\x0a <rdf:Seq>\x0a <rdf:li rdf:parseType=\x22Resource\x22>\x0a <stEvt:action>created</stEvt:action>\x0a <stEvt:instanceID>xmp.iid:03b365c0-f6bc-9447-842d-c2c5642b15ce</stEvt:instanceID>\x0a <stEvt:when>2015-09-05T14:55:01-07:00</stEvt:when>\x0a <stEvt:softwareAgent>Adobe Photoshop CC 2015 (Windows)</stEvt:softwareAgent>\x0a </rdf:li>\x0a <rdf:li rdf:parseType=\x22Resource\x22>\x0a <stEvt:action>saved</stEvt:action>\x0a <stEvt:instanceID>xmp.iid:1908167c-e866-8b4b-8154-22093cbfb4e8</stEvt:instanceID>\x0a <stEvt:when>2015-09-05T14:55:01-07:00</stEvt:when>\x0a <stEvt:softwareAgent>Adobe Photoshop CC 2015 (Windows)</stEvt:softwareAgent>\x0a <stEvt:changed>/</stEvt:changed>\x0a </rdf:li>\x0a </rdf:Seq>\x0a </xmpMM:History>\x0a <dc:format>image/png</dc:format>\x0a <photoshop:ColorMode>3</photoshop:ColorMode>\x0a <photoshop:ICCProfile>sRGB IEC61966-2.1</photoshop:ICCProfile>\x0a <tiff:Orientation>1</tiff:Orientation>\x0a <tiff:XResolution>720000/10000</tiff:XResolution>\x0a <tiff:YResolution>720000/10000</tiff:YResolution>\x0a <tiff:ResolutionUnit>2</tiff:ResolutionUnit>\x0a <exif:ColorSpace>1</exif:ColorSpace>\x0a <exif:PixelXDimension>56</exif:PixelXDimension>\x0a <exif:PixelYDimension>56</exif:PixelYDimension>\x0a </rdf:Description>\x0a </rdf:RDF>\x0a</x:xmpmeta>\x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a 
\x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a<?xpacket end=\x22w\x22?>\x10F7\xbb\x00\x00\x00 cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x02\x8bIDATx\xda\xec\x9a1k\xf2P\x14\x86\xf5\x87t|\xc0\xa2\x89\x89&iB\xffB\xe9\xdf\x10\xba\x8a\xbb\x8b\x8b\x8bP\x0bN\xe2\xe4Vp\xcc^\xea/(E\x10\x0aupsp\xe8\xe4r\xbe\xc1\x9bE\xfa\x85\xc4V\x9b\xde[\xe1]\xc4\xdc\xe41\xf7\xde\xf3\x9esOIDJ:\xab\xf4\x07h: p\xa82\x10\x00-\xe0\x1e\x88\x81W`\x03\xec\x946\xea\xbbX\xfd\xa6\xa5\xae)\x7f2^\xaa\xce\x09x\x0dt\x81\xa7\xdb\xcb\x8a\xb4\xad\xaa\x0c\x1d[fMG\xe6AC\x16\xa1'\xab\xc8\x97U\xe4\xcb\x22\xf4d\x1e4d\xd6td\xe8\xd8\xd2\xb6\xaar{Y\x11\xe0I\x8dq]$\xc0\x1b`\x02l{uKb\xcf\x95u\xe4\x1f\xa5\xd8s\xa5W\xb7\x04\xd8\xaa1o~\x0c\x10\xb8\x02F\xc0\xc7\xd8\xad\xcb2\xf4\x8e\x06;\xd42\xf4d\xec\xd6\x05\xf8P\xf7\xb8:\x1b\xa0Z'w\xc0\xcb\xc8\xb5\xe5\xed\x1b\xc1\x0e\xf5\x16z2rm\x01^\xd4=\xcb'\x05\x04.\x80A\xc7\xaa\xc9s\xd08\x19\xd8\xa1\x9e\x83\x86t\xac\x9a\x00\x03\xf5\x0c\xdf\x0f\x08T\x81i\xdf\xb1\xe4=\xf4\xcf\x06\x97\xe8=\xf4\xa5\xefX\x02L\xd5\xb3|\x1f\xa0\x1a\xf0\xf1\xc1\xb5\xcf\x0ev\xa8\x87\xfd\x94}L 
\xbf\x0c\xa8\xa6\xc4\xb4\x08p\x07\x90S\xe0\xe2K\x80jQ\x0f\xfa\x8eU\x18\xb8Dj\xba\x0e\x80\xf2W\x00\xef:V\xedG\xd6\x5c\x965\xa96\x9e\xbb\xa3\x00U\xecy9\xe7ny\xcc\xee\xaaB\xc8\xd51\x80\xa3Q\x81\xd6\xdd\xff\xa4\xe2\xe4(\x17\xa0\xb2H\x1fY\x82\xf8\xa9?Y\xcc\x80r<7y\x00'c\xb7\x9e\xe9\x1f\xfci\xc0u\xe4'\xb6n\x92\x09P9\xf9mVoY\x04\xc0\xe5\xfe-n\x81\xeb,\x80\xdd^={X(\x02\xe0:\xf2\x93,\xa4\x9b\x0a\xa8\xe2\xdeS\x9e\x94\xa7(\x80\xb1\xe7&\xf9d9\x0d0\xb8\xbd\xac\xe4\xda\xc5\x8a\x02\xb8\x8e\xfc$i\x0e\xd2\x00[m\xab\xfak\x01\xdbVU\x80V\x1a\xe0\xfd\xd0\xb1\x7f-\xe0\xd0\xb1\x05\xb8O\x03\x8cgM\xe7\xd7\x02\xce\x9a\x8e\x00q\x1a\xe0\xeb<\xa75+\x12\xe0|o\xdd^\xd3\x007\x8b\x9c%\x88\x22\x01.\xf6\xf1p\x93\x06\xb8[\xe5\xf4\x82E\x02\x5cE\xbe\x00;\xa3\x01\xb5\x9f\xa2\xdao2\xda\x87\x09\xed\x03\xbd\xf6VM{\xb3\xadw\xba\xa4}\xc2kD\xc9B\xfb\xa2\x93\x11eC\xed\x0b\xbfF\x94\xee\xb5?|1\xe2\xf8L\xfb\x03P#\x8e\xb0\x8dhB0\xa2\x8d\xc4\x88F #Z\xb9\x8ci\xc63\xa9\x9dR\xef\x86\xd8\xbf\x9e\xed?\xc0\xd3\xea\xdf\x00oq\xc0\xdd\x9d\x80\xbd3\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x00\x9d\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x18\x00\x00\x00\x18\x08\x04\x00\x00\x00J~\xf5s\x00\x00\x00dIDAT8\xcb\xed\xd21\x11\x83P\x10\x04\xd0OC\x89\x80(\x08\x1a\xa2\x02\x15\x89\x0bP\x11\x5c\xc4\xc4\xf7\x80\x03\xfa\xb44<4lA\x05\xdb\xbf\x99\xdb\x9b-\xe5N)~\x1e\x19\xe0\xef\xa3I\x00T}\x06\xd8L\xda\x04\xc0\xe2\x95\x01v\xb3.\x01\xb0\x1aN\x04\xbborRTz3&o\xad\x9e\xc94\xde\xc94\xd2\xf1]!\x07|\xdf\xaaOFT!R\x00\x00\x00\x00IEND\xaeB`\x82"
qt_resource_name = "\x00\x02\x00\x00\x07\xb9\x00u\x00i\x00\x03\x00\x00x\xc3\x00r\x00e\x00s\x00\x1f\x02\xc4-\xc7\x00i\x00c\x00_\x00p\x00l\x00a\x00y\x00_\x00a\x00r\x00r\x00o\x00w\x00_\x00w\x00h\x00i\x00t\x00e\x00_\x002\x004\x00d\x00p\x00_\x002\x00x\x00.\x00p\x00n\x00g\x00,\x0aXU'\x00i\x00c\x00_\x00c\x00h\x00e\x00c\x00k\x00_\x00b\x00o\x00x\x00_\x00o\x00u\x00t\x00l\x00i\x00n\x00e\x00_\x00b\x00l\x00a\x00n\x00k\x00_\x00w\x00h\x00i\x00t\x00e\x00_\x002\x004\x00d\x00p\x00_\x001\x00x\x00.\x00p\x00n\x00g\x00\x1b\x00zw\x87\x00i\x00c\x00_\x00c\x00l\x00o\x00s\x00e\x00d\x00_\x00w\x00h\x00i\x00t\x00e\x00_\x002\x004\x00d\x00p\x00_\x002\x00x\x00.\x00p\x00n\x00g\x00\x19\x05\xcaw\x87\x00i\x00c\x00_\x00d\x00o\x00n\x00e\x00_\x00w\x00h\x00i\x00t\x00e\x00_\x002\x004\x00d\x00p\x00_\x002\x00x\x00.\x00p\x00n\x00g\x00%\x06;u\xe7\x00i\x00c\x00_\x00p\x00l\x00a\x00y\x00_\x00a\x00r\x00r\x00o\x00w\x00_\x00c\x00o\x00l\x00o\x00r\x00_\x00h\x00o\x00v\x00e\x00r\x00_\x002\x004\x00d\x00p\x00_\x002\x00x\x00.\x00p\x00n\x00g\x00\x1e\x09\xe8\x96'\x00i\x00c\x00_\x00c\x00h\x00e\x00c\x00k\x00_\x00b\x00o\x00x\x00_\x00w\x00h\x00i\x00t\x00e\x00_\x002\x004\x00d\x00p\x00_\x001\x00x\x00.\x00p\x00n\x00g\x00 \x0b\x98L\x07\x00i\x00c\x00_\x00e\x00x\x00p\x00a\x00n\x00d\x00_\x00m\x00o\x00r\x00e\x00_\x00w\x00h\x00i\x00t\x00e\x00_\x002\x004\x00d\x00p\x00_\x002\x00x\x00.\x00p\x00n\x00g\x00 
\x0b\x88L\x07\x00i\x00c\x00_\x00e\x00x\x00p\x00a\x00n\x00d\x00_\x00m\x00o\x00r\x00e\x00_\x00w\x00h\x00i\x00t\x00e\x00_\x002\x004\x00d\x00p\x00_\x001\x00x\x00.\x00p\x00n\x00g\x00\x1a\x0f\x83g\x87\x00i\x00c\x00_\x00e\x00r\x00r\x00o\x00r\x00_\x00w\x00h\x00i\x00t\x00e\x00_\x002\x004\x00d\x00p\x00_\x002\x00x\x00.\x00p\x00n\x00g\x00\x1f\x0c\x1bb\xe7\x00i\x00c\x00_\x00p\x00l\x00a\x00y\x00_\x00a\x00r\x00r\x00o\x00w\x00_\x00c\x00o\x00l\x00o\x00r\x00_\x002\x004\x00d\x00p\x00_\x002\x00x\x00.\x00p\x00n\x00g\x00\x19\x01\xd2\x9dG\x00i\x00c\x00_\x00s\x00t\x00o\x00p\x00_\x00w\x00h\x00i\x00t\x00e\x00_\x002\x004\x00d\x00p\x00_\x002\x00x\x00.\x00p\x00n\x00g\x00\x1c\x0dB7\x87\x00i\x00c\x00_\x00w\x00a\x00r\x00n\x00i\x00n\x00g\x00_\x00w\x00h\x00i\x00t\x00e\x00_\x002\x004\x00d\x00p\x00_\x002\x00x\x00.\x00p\x00n\x00g\x00\x19\x0f\x0c\xd0g\x00i\x00c\x00_\x00s\x00t\x00o\x00p\x00_\x00c\x00o\x00l\x00o\x00r\x00_\x002\x004\x00d\x00p\x00_\x002\x00x\x00.\x00p\x00n\x00g\x00\x1f\x02\xd4-\xc7\x00i\x00c\x00_\x00p\x00l\x00a\x00y\x00_\x00a\x00r\x00r\x00o\x00w\x00_\x00w\x00h\x00i\x00t\x00e\x00_\x002\x004\x00d\x00p\x00_\x001\x00x\x00.\x00p\x00n\x00g"
qt_resource_struct = "\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x0a\x00\x02\x00\x00\x00\x0e\x00\x00\x00\x03\x00\x00\x00\xb8\x00\x00\x00\x00\x00\x01\x00\x00J\x0e\x00\x00\x02\xc8\x00\x00\x00\x00\x00\x01\x00\x01\x1d\xd8\x00\x00\x00\x16\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x03v\x00\x00\x00\x00\x00\x01\x00\x01gF\x00\x00\x00\xf4\x00\x00\x00\x00\x00\x01\x00\x00\x85&\x00\x00\x01,\x00\x00\x00\x00\x00\x01\x00\x00\x85\xf1\x00\x00\x01|\x00\x00\x00\x00\x00\x01\x00\x00\xd2\x03\x00\x00\x00Z\x00\x00\x00\x00\x00\x01\x00\x00I\x93\x00\x00\x02\x04\x00\x00\x00\x00\x00\x01\x00\x00\xd3q\x00\x00\x01\xbe\x00\x00\x00\x00\x00\x01\x00\x00\xd2\xb7\x00\x00\x02\x84\x00\x00\x00\x00\x00\x01\x00\x00\xd5\xa9\x00\x00\x03\x00\x00\x00\x00\x00\x00\x01\x00\x01\x1e6\x00\x00\x03>\x00\x00\x00\x00\x00\x01\x00\x01\x1f\xa6\x00\x00\x02J\x00\x00\x00\x00\x00\x01\x00\x00\xd3\xf6"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 6,005.318182
| 128,216
| 0.397262
| 15,003
| 132,117
| 3.488969
| 0.109645
| 0.068774
| 0.102302
| 0.135256
| 0.749642
| 0.742688
| 0.741847
| 0.735008
| 0.714318
| 0.705626
| 0
| 0.245697
| 0.474504
| 132,117
| 21
| 128,217
| 6,291.285714
| 0.50826
| 0.001378
| 0
| 0
| 0
| 0.333333
| 0.997256
| 0.476362
| 0
| 0
| 0.000061
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
f80d5cbf2b1f4889eea1ac62ad358942a56d009b
| 159
|
py
|
Python
|
20.py
|
lycantropos/Project-Euler
|
df6c5a7e118ddd3fe3c39f4b30ad57e7d3ea853b
|
[
"MIT"
] | null | null | null |
20.py
|
lycantropos/Project-Euler
|
df6c5a7e118ddd3fe3c39f4b30ad57e7d3ea853b
|
[
"MIT"
] | null | null | null |
20.py
|
lycantropos/Project-Euler
|
df6c5a7e118ddd3fe3c39f4b30ad57e7d3ea853b
|
[
"MIT"
] | null | null | null |
from math import factorial
from utils import number_digits_sum
assert number_digits_sum(factorial(10)) == 27
assert number_digits_sum(factorial(100)) == 648
| 22.714286
| 47
| 0.811321
| 24
| 159
| 5.125
| 0.541667
| 0.292683
| 0.365854
| 0.341463
| 0.487805
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070922
| 0.113208
| 159
| 6
| 48
| 26.5
| 0.801418
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
f8c3cfe8db41db2f97039670240b422982c9671f
| 28,200
|
py
|
Python
|
sdk/python/pulumi_yandex/lb_network_load_balancer.py
|
pulumi/pulumi-yandex
|
559a0c82fd2b834bb5f1dc3abbf0dab689b13a3e
|
[
"ECL-2.0",
"Apache-2.0"
] | 9
|
2021-04-20T15:39:41.000Z
|
2022-02-20T09:14:39.000Z
|
sdk/python/pulumi_yandex/lb_network_load_balancer.py
|
pulumi/pulumi-yandex
|
559a0c82fd2b834bb5f1dc3abbf0dab689b13a3e
|
[
"ECL-2.0",
"Apache-2.0"
] | 56
|
2021-04-20T11:31:03.000Z
|
2022-03-31T15:53:06.000Z
|
sdk/python/pulumi_yandex/lb_network_load_balancer.py
|
pulumi/pulumi-yandex
|
559a0c82fd2b834bb5f1dc3abbf0dab689b13a3e
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['LbNetworkLoadBalancerArgs', 'LbNetworkLoadBalancer']
@pulumi.input_type
class LbNetworkLoadBalancerArgs:
def __init__(__self__, *,
attached_target_groups: Optional[pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerAttachedTargetGroupArgs']]]] = None,
description: Optional[pulumi.Input[str]] = None,
folder_id: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
listeners: Optional[pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerListenerArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
region_id: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a LbNetworkLoadBalancer resource.
:param pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerAttachedTargetGroupArgs']]] attached_target_groups: An AttachedTargetGroup resource. The structure is documented below.
:param pulumi.Input[str] description: An optional description of the network load balancer. Provide this property when
you create the resource.
:param pulumi.Input[str] folder_id: The ID of the folder to which the resource belongs.
If omitted, the provider folder is used.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to assign to this network load balancer. A list of key/value pairs.
:param pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerListenerArgs']]] listeners: Listener specification that will be used by a network load balancer. The structure is documented below.
:param pulumi.Input[str] name: Name of the listener. The name must be unique for each listener on a single load balancer.
:param pulumi.Input[str] region_id: ID of the availability zone where the network load balancer resides.
The default is 'ru-central1'.
:param pulumi.Input[str] type: Type of the network load balancer. Must be one of 'external' or 'internal'. The default is 'external'.
"""
if attached_target_groups is not None:
pulumi.set(__self__, "attached_target_groups", attached_target_groups)
if description is not None:
pulumi.set(__self__, "description", description)
if folder_id is not None:
pulumi.set(__self__, "folder_id", folder_id)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if listeners is not None:
pulumi.set(__self__, "listeners", listeners)
if name is not None:
pulumi.set(__self__, "name", name)
if region_id is not None:
pulumi.set(__self__, "region_id", region_id)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="attachedTargetGroups")
def attached_target_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerAttachedTargetGroupArgs']]]]:
"""
An AttachedTargetGroup resource. The structure is documented below.
"""
return pulumi.get(self, "attached_target_groups")
@attached_target_groups.setter
def attached_target_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerAttachedTargetGroupArgs']]]]):
pulumi.set(self, "attached_target_groups", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
An optional description of the network load balancer. Provide this property when
you create the resource.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="folderId")
def folder_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the folder to which the resource belongs.
If omitted, the provider folder is used.
"""
return pulumi.get(self, "folder_id")
@folder_id.setter
def folder_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "folder_id", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Labels to assign to this network load balancer. A list of key/value pairs.
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter
def listeners(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerListenerArgs']]]]:
"""
Listener specification that will be used by a network load balancer. The structure is documented below.
"""
return pulumi.get(self, "listeners")
@listeners.setter
def listeners(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerListenerArgs']]]]):
pulumi.set(self, "listeners", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the listener. The name must be unique for each listener on a single load balancer.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="regionId")
def region_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of the availability zone where the network load balancer resides.
The default is 'ru-central1'.
"""
return pulumi.get(self, "region_id")
@region_id.setter
def region_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region_id", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
Type of the network load balancer. Must be one of 'external' or 'internal'. The default is 'external'.
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@pulumi.input_type
class _LbNetworkLoadBalancerState:
def __init__(__self__, *,
attached_target_groups: Optional[pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerAttachedTargetGroupArgs']]]] = None,
created_at: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
folder_id: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
listeners: Optional[pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerListenerArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
region_id: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering LbNetworkLoadBalancer resources.
:param pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerAttachedTargetGroupArgs']]] attached_target_groups: An AttachedTargetGroup resource. The structure is documented below.
:param pulumi.Input[str] created_at: The network load balancer creation timestamp.
:param pulumi.Input[str] description: An optional description of the network load balancer. Provide this property when
you create the resource.
:param pulumi.Input[str] folder_id: The ID of the folder to which the resource belongs.
If omitted, the provider folder is used.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to assign to this network load balancer. A list of key/value pairs.
:param pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerListenerArgs']]] listeners: Listener specification that will be used by a network load balancer. The structure is documented below.
:param pulumi.Input[str] name: Name of the listener. The name must be unique for each listener on a single load balancer.
:param pulumi.Input[str] region_id: ID of the availability zone where the network load balancer resides.
The default is 'ru-central1'.
:param pulumi.Input[str] type: Type of the network load balancer. Must be one of 'external' or 'internal'. The default is 'external'.
"""
if attached_target_groups is not None:
pulumi.set(__self__, "attached_target_groups", attached_target_groups)
if created_at is not None:
pulumi.set(__self__, "created_at", created_at)
if description is not None:
pulumi.set(__self__, "description", description)
if folder_id is not None:
pulumi.set(__self__, "folder_id", folder_id)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if listeners is not None:
pulumi.set(__self__, "listeners", listeners)
if name is not None:
pulumi.set(__self__, "name", name)
if region_id is not None:
pulumi.set(__self__, "region_id", region_id)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="attachedTargetGroups")
def attached_target_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerAttachedTargetGroupArgs']]]]:
"""
An AttachedTargetGroup resource. The structure is documented below.
"""
return pulumi.get(self, "attached_target_groups")
@attached_target_groups.setter
def attached_target_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerAttachedTargetGroupArgs']]]]):
pulumi.set(self, "attached_target_groups", value)
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> Optional[pulumi.Input[str]]:
"""
The network load balancer creation timestamp.
"""
return pulumi.get(self, "created_at")
@created_at.setter
def created_at(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "created_at", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
An optional description of the network load balancer. Provide this property when
you create the resource.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="folderId")
def folder_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the folder to which the resource belongs.
If omitted, the provider folder is used.
"""
return pulumi.get(self, "folder_id")
@folder_id.setter
def folder_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "folder_id", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Labels to assign to this network load balancer. A list of key/value pairs.
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter
def listeners(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerListenerArgs']]]]:
"""
Listener specification that will be used by a network load balancer. The structure is documented below.
"""
return pulumi.get(self, "listeners")
@listeners.setter
def listeners(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['LbNetworkLoadBalancerListenerArgs']]]]):
pulumi.set(self, "listeners", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the listener. The name must be unique for each listener on a single load balancer.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="regionId")
def region_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of the availability zone where the network load balancer resides.
The default is 'ru-central1'.
"""
return pulumi.get(self, "region_id")
@region_id.setter
def region_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region_id", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
Type of the network load balancer. Must be one of 'external' or 'internal'. The default is 'external'.
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
class LbNetworkLoadBalancer(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
attached_target_groups: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LbNetworkLoadBalancerAttachedTargetGroupArgs']]]]] = None,
description: Optional[pulumi.Input[str]] = None,
folder_id: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
listeners: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LbNetworkLoadBalancerListenerArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
region_id: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Creates a network load balancer in the specified folder using the data specified in the config.
For more information, see [the official documentation](https://cloud.yandex.com/docs/load-balancer/concepts).
## Example Usage
```python
import pulumi
import pulumi_yandex as yandex
foo = yandex.LbNetworkLoadBalancer("foo",
attached_target_groups=[yandex.LbNetworkLoadBalancerAttachedTargetGroupArgs(
healthchecks=[yandex.LbNetworkLoadBalancerAttachedTargetGroupHealthcheckArgs(
http_options=yandex.LbNetworkLoadBalancerAttachedTargetGroupHealthcheckHttpOptionsArgs(
path="/ping",
port=8080,
),
name="http",
)],
target_group_id=yandex_lb_target_group["my-target-group"]["id"],
)],
listeners=[yandex.LbNetworkLoadBalancerListenerArgs(
external_address_spec=yandex.LbNetworkLoadBalancerListenerExternalAddressSpecArgs(
ip_version="ipv4",
),
name="my-listener",
port=8080,
)])
```
## Import
A network load balancer can be imported using the `id` of the resource, e.g.
```sh
$ pulumi import yandex:index/lbNetworkLoadBalancer:LbNetworkLoadBalancer default network_load_balancer_id
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LbNetworkLoadBalancerAttachedTargetGroupArgs']]]] attached_target_groups: An AttachedTargetGroup resource. The structure is documented below.
:param pulumi.Input[str] description: An optional description of the network load balancer. Provide this property when
you create the resource.
:param pulumi.Input[str] folder_id: The ID of the folder to which the resource belongs.
If omitted, the provider folder is used.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to assign to this network load balancer. A list of key/value pairs.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LbNetworkLoadBalancerListenerArgs']]]] listeners: Listener specification that will be used by a network load balancer. The structure is documented below.
:param pulumi.Input[str] name: Name of the listener. The name must be unique for each listener on a single load balancer.
:param pulumi.Input[str] region_id: ID of the availability zone where the network load balancer resides.
The default is 'ru-central1'.
:param pulumi.Input[str] type: Type of the network load balancer. Must be one of 'external' or 'internal'. The default is 'external'.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[LbNetworkLoadBalancerArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Creates a network load balancer in the specified folder using the data specified in the config.
For more information, see [the official documentation](https://cloud.yandex.com/docs/load-balancer/concepts).
## Example Usage
```python
import pulumi
import pulumi_yandex as yandex
foo = yandex.LbNetworkLoadBalancer("foo",
attached_target_groups=[yandex.LbNetworkLoadBalancerAttachedTargetGroupArgs(
healthchecks=[yandex.LbNetworkLoadBalancerAttachedTargetGroupHealthcheckArgs(
http_options=yandex.LbNetworkLoadBalancerAttachedTargetGroupHealthcheckHttpOptionsArgs(
path="/ping",
port=8080,
),
name="http",
)],
target_group_id=yandex_lb_target_group["my-target-group"]["id"],
)],
listeners=[yandex.LbNetworkLoadBalancerListenerArgs(
external_address_spec=yandex.LbNetworkLoadBalancerListenerExternalAddressSpecArgs(
ip_version="ipv4",
),
name="my-listener",
port=8080,
)])
```
## Import
A network load balancer can be imported using the `id` of the resource, e.g.
```sh
$ pulumi import yandex:index/lbNetworkLoadBalancer:LbNetworkLoadBalancer default network_load_balancer_id
```
:param str resource_name: The name of the resource.
:param LbNetworkLoadBalancerArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(LbNetworkLoadBalancerArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
attached_target_groups: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LbNetworkLoadBalancerAttachedTargetGroupArgs']]]]] = None,
description: Optional[pulumi.Input[str]] = None,
folder_id: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
listeners: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LbNetworkLoadBalancerListenerArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
region_id: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = LbNetworkLoadBalancerArgs.__new__(LbNetworkLoadBalancerArgs)
__props__.__dict__["attached_target_groups"] = attached_target_groups
__props__.__dict__["description"] = description
__props__.__dict__["folder_id"] = folder_id
__props__.__dict__["labels"] = labels
__props__.__dict__["listeners"] = listeners
__props__.__dict__["name"] = name
__props__.__dict__["region_id"] = region_id
__props__.__dict__["type"] = type
__props__.__dict__["created_at"] = None
super(LbNetworkLoadBalancer, __self__).__init__(
'yandex:index/lbNetworkLoadBalancer:LbNetworkLoadBalancer',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
attached_target_groups: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LbNetworkLoadBalancerAttachedTargetGroupArgs']]]]] = None,
created_at: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
folder_id: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
listeners: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LbNetworkLoadBalancerListenerArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
region_id: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None) -> 'LbNetworkLoadBalancer':
"""
Get an existing LbNetworkLoadBalancer resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LbNetworkLoadBalancerAttachedTargetGroupArgs']]]] attached_target_groups: An AttachedTargetGroup resource. The structure is documented below.
:param pulumi.Input[str] created_at: The network load balancer creation timestamp.
:param pulumi.Input[str] description: An optional description of the network load balancer. Provide this property when
you create the resource.
:param pulumi.Input[str] folder_id: The ID of the folder to which the resource belongs.
If omitted, the provider folder is used.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to assign to this network load balancer. A list of key/value pairs.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LbNetworkLoadBalancerListenerArgs']]]] listeners: Listener specification that will be used by a network load balancer. The structure is documented below.
:param pulumi.Input[str] name: Name of the listener. The name must be unique for each listener on a single load balancer.
:param pulumi.Input[str] region_id: ID of the availability zone where the network load balancer resides.
The default is 'ru-central1'.
:param pulumi.Input[str] type: Type of the network load balancer. Must be one of 'external' or 'internal'. The default is 'external'.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _LbNetworkLoadBalancerState.__new__(_LbNetworkLoadBalancerState)
__props__.__dict__["attached_target_groups"] = attached_target_groups
__props__.__dict__["created_at"] = created_at
__props__.__dict__["description"] = description
__props__.__dict__["folder_id"] = folder_id
__props__.__dict__["labels"] = labels
__props__.__dict__["listeners"] = listeners
__props__.__dict__["name"] = name
__props__.__dict__["region_id"] = region_id
__props__.__dict__["type"] = type
return LbNetworkLoadBalancer(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="attachedTargetGroups")
def attached_target_groups(self) -> pulumi.Output[Optional[Sequence['outputs.LbNetworkLoadBalancerAttachedTargetGroup']]]:
"""
An AttachedTargetGroup resource. The structure is documented below.
"""
return pulumi.get(self, "attached_target_groups")
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> pulumi.Output[str]:
"""
The network load balancer creation timestamp.
"""
return pulumi.get(self, "created_at")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
An optional description of the network load balancer. Provide this property when
you create the resource.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="folderId")
def folder_id(self) -> pulumi.Output[str]:
"""
The ID of the folder to which the resource belongs.
If omitted, the provider folder is used.
"""
return pulumi.get(self, "folder_id")
@property
@pulumi.getter
def labels(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Labels to assign to this network load balancer. A list of key/value pairs.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter
def listeners(self) -> pulumi.Output[Optional[Sequence['outputs.LbNetworkLoadBalancerListener']]]:
"""
Listener specification that will be used by a network load balancer. The structure is documented below.
"""
return pulumi.get(self, "listeners")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the listener. The name must be unique for each listener on a single load balancer.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="regionId")
def region_id(self) -> pulumi.Output[Optional[str]]:
"""
ID of the availability zone where the network load balancer resides.
The default is 'ru-central1'.
"""
return pulumi.get(self, "region_id")
@property
@pulumi.getter
def type(self) -> pulumi.Output[Optional[str]]:
"""
Type of the network load balancer. Must be one of 'external' or 'internal'. The default is 'external'.
"""
return pulumi.get(self, "type")
| 47.157191
| 221
| 0.656454
| 3,134
| 28,200
| 5.738034
| 0.069879
| 0.093588
| 0.066952
| 0.059946
| 0.890897
| 0.876494
| 0.860924
| 0.856086
| 0.85325
| 0.841239
| 0
| 0.00122
| 0.244113
| 28,200
| 597
| 222
| 47.236181
| 0.842419
| 0.384255
| 0
| 0.808442
| 1
| 0
| 0.121881
| 0.07013
| 0
| 0
| 0
| 0
| 0
| 1
| 0.162338
| false
| 0.003247
| 0.022727
| 0
| 0.282468
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3e9293ed6b07cffae4a21ee8cbf56d9510e65f69
| 5,724
|
py
|
Python
|
icon.py
|
yaooort/Apputils
|
42faa2ab72f376651279cd7b47340a8822a390af
|
[
"Apache-2.0"
] | 17
|
2018-08-28T04:40:07.000Z
|
2021-12-15T06:19:31.000Z
|
icon.py
|
yaooort/Apputils
|
42faa2ab72f376651279cd7b47340a8822a390af
|
[
"Apache-2.0"
] | 4
|
2019-05-17T09:35:30.000Z
|
2022-03-13T03:50:20.000Z
|
icon.py
|
yaooort/Apputils
|
42faa2ab72f376651279cd7b47340a8822a390af
|
[
"Apache-2.0"
] | 3
|
2019-01-15T07:13:53.000Z
|
2020-03-29T00:48:39.000Z
|
img = 'AAABAAEAICAAAAEAIACoEAAAFgAAACgAAAAgAAAAQAAAAAEAIAAAAAAAABAAABILAAASCwAAAAAAAAAAAAAAAAAAjR78AJMl/ACMHfwojBz8gIsb/MaLGvzuihn7/YoZ+/+JGPv/iRf7/4kW+/+IFvv/iBX7/4cU+/+HE/v/hxL7/4YS+/+GEfv/hRD7/4UP+/+FD/r/hA76/4QN+v+DDPr+gwz674ML+seCC/qBggr6KYEI+gCCCvoAAAAAAI4g/QCOIP0FjR/8VY0e/NCMHfz8jBz8/4sb/P+LG/v/ihr7/4oZ+/+KGPv/iRj7/4kX+/+IFvv/iBT7/4cU+/+HE/v/hxL7/4YS+/+GEfv/hhH7/4UQ+/+FD/r/hA76/4QN+v+EDfr/gwz6/4ML+vyCC/rTggr6WYIL+wWCC/sAixz8AI4h/FiOIPzsjR/8/40e/P+MHfz/jB38/4wc/P+LG/z/ixr7/4oY+/+IFvv/iRb7/4wd+/+PI/v/jyT7/48j+/+NHvv/iRb7/4UQ+/+FEPv/hhH7/4UQ+/+FEPr/hQ/6/4QO+v+EDfr/gw36/4MM+v+DC/rtgwv6WYAM+gCPI/wqjyL80o4h/P+OIPz/jR/8/40f/P+NHvz/jB38/4sb/P+LGvz/lCz8/6tZ/P/FjP3/2rb+/+fQ/v/r2P7/6tb+/+LG/v/Qo/3/t3P9/5w9/P+JGPv/hQ/7/4YR+/+FEPv/hQ/6/4QP+v+EDvr/hA36/4MM+v+DDPrSgwz6KpAk/IGPI/z8jyL8/48i/P+OIfz/jiD8/40f/P+MHfz/lS/8/7Np/f/Onv7/0qf+/82c/v/JlP3/yZX9/9Gk/f/hxP7/8+f///79///+/f//8ub//9Gl/f+jSvz/iBX7/4UQ+/+GEfv/hRD7/4UP+v+EDvr/hA76/4MN+vyDDPqBkCX8x5Ak/P+QJPz/jyP8/48i/P+OIfz/jiH8/5s7/P+yZ/3/sWX9/6hU/P+eQfz/kSj8/4oa/P+KGfv/ixz7/48i+/+dPvz/u3v9/+TK/v/9+/////////Tp///Ch/3/jyL7/4UQ+/+GEfv/hRD7/4UQ+/+FD/r/hA76/4QO+seRJv3vkSb8/5Al/P+QJPz/kCP8/48i/P+TKvz/n0L8/69g/f+3cf3/p1P9/5Yx/P+SKfz/jR78/4sb/P+LG/z/ihr7/4kX+/+JFvv/lC37/8GF/f/06f////////37///Uq/3/kyz7/4YQ+/+GEvv/hhH7/4UQ+/+FD/r/hQ/675Io/f6RJ/3/kSb9/5El/P+QJfz/jyP8/5g0/P+/gf3/unf9/5g1/P+NH/z/jB38/6ZQ/P+ULfz/jBz8/4wc/P+LG/z/ixv7/4sa+/+JGPv/ihn7/6xb/P/t2/7///////7+///Wr/7/kSf7/4YS+/+GEvv/hhH7/4YR+/+FEPv+kin9/5Io/f+SJ/3/kSf9/5Al/f+YM/z/xo/9/7Rr/f+QJfz/jiH8/44h/P+NH/z/sWb9/6dS/f+MHPz/jB38/4wd/P+LHPz/ixv8/4sa+/+KGfv/iRf7/6dT/P/v3/7///////78///Ikv3/ihn7/4cT+/+HE/v/hhL7/4YR+/+TKv3/kyn9/5Ip/f+SKP3/kyr9/8CD/v+3cf3/kCX8/5Ak/P+QI/z/jyP8/40f/P+xZf3/xIz9/40f/P+NH/z/jR78/4wd/P+MHPz/ixz8/4sb/P+LGvv/iRj7/7Nq/P/48v////////fv//+qWPz/hhL7/4cU+/+HE/v/hxP7/5Qr/f+TK/3/kyr9/5In/f+pVv3/wof+/5Qs/f+RJv3/kSX8/5Al/P+QJPz/jiD8/6lW/f/hxf7/lzP8/40f/P+NH/z/jR78/40e/P+MHfz/jBz8/4sb/P+LGvz/jiD7/9Gl/f///////////9q3/v+OIfv/iBT7/4gV+/+HFPv/lS39/5Qs/f+UK/3/lS/9/7+A/v+iSP3/k
Sb9/5In/f+RJ/3/kSb8/5El/P+PIvz/okn8/+/h/v+tXv3/jR78/44g/P+OIPz/jR/8/40e/P+MHvz/jB38/4wc/P+KGPz/pU78//Xs////////9/D//6ZQ/P+HFPv/iBb7/4gV+/+VLv3/lS39/5Qr/f+eQP3/uHP9/5Qs/f+TKf3/kin9/5Io/f+SJ/3/kSb9/5Ak/f+fQfz/8OL//86f/v+PI/z/jyL8/44h/P+OIPz/jR/8/40f/P+NHvz/jB38/4wc/P+QJPz/27n+////////////woj9/4kW+/+JF/v/iRb7/5Yv/f+WL/3/lS39/6BD/f+mT/3/kyr9/5Qr/f+TKv3/kyn9/5Ip/f+SKP3/kSb9/5w7/f/r1///69f//5o5/P+PIvz/jyL8/48i/P+OIfz/jiD8/40f/P+NH/z/jR78/4sb/P/Dif3////////////Ztf7/jSD7/4kY+/+JGPv/lzH9/5Yw/f+WLv3/n0H9/5o4/f+ULP3/lCz9/5Qr/f+TK/3/kyr9/5Mp/f+SJ/3/mTX9/+TK/v/8+f//sGP9/48h/P+QJPz/jyP8/48i/P+OIfz/jiH8/44g/P+NH/z/ixv8/7Np/f/+/P///////+bO/v+RJ/z/ihn7/4oZ+/+XMv7/lzH+/5Yw/f+ZNf3/lzH9/5Uu/f+VLf3/lC39/5Qs/f+UK/3/kyr9/5Mp/f+WL/3/37/+///////NnP7/kSb9/5Al/P+QJPz/kCP8/48j/P+PIvz/jiH8/44g/P+MHPz/rFz9//z6////////69f+/5Mr/P+LGvz/ixr7/5gz/v+YM/7/lzL+/5cx/f+WMP3/li/9/5Yv/f+VLv3/lS39/5Qs/f+ULP3/lCv9/5Mr/f/Ysv7//////+rW//+cPf3/kCX9/5El/P+QJfz/kCT8/48j/P+PIvz/jyL8/40e/P+tXf3//Pr////////p0/7/kyv8/4sb/P+LG/z/mTX+/5g0/v+YM/7/lzL+/5cx/v+XMf7/ljD9/5Yv/f+VLv3/lS79/5Ut/f+ULP3/kyr9/8+g/v//////+/f//7Rr/v+QJP3/kSf9/5Em/f+RJfz/kCT8/5Ak/P+PI/z/jR/8/7Vu/f/+/P///////9+//v+RJ/z/jB38/4wd/P+ZNv7/mTX+/5k0/v+YNP7/mDP+/5cy/v+XMf7/lzH9/5Yw/f+WL/3/lS79/5Qs/f+SKP3/xo7+////////////0qf+/5Mr/f+SKP3/kif9/5En/f+RJvz/kCX8/5Ak/P+PIvz/xpD+////////////zJr+/44g/P+NH/z/jR78/5o3/v+aNv7/mTb+/5k1/v+YNP7/mDP+/5gz/v+XMv7/lzH+/5Yw/f+WMP3/pEz9/7Np/v/Uqv7///7////////u3f//oET9/5Io/f+SKf3/kij9/5En/f+RJv3/kCX8/5Uu/P/fwP7///////v3//+yaP3/jR78/44g/P+NH/z/mzj+/5o4/v+aN/7/mjb+/5k1/v+ZNf7/mDT+/5gz/v+YMv7/lzH+/5cw/f+mT/3/06n+//fv//////////////z5//+6d/7/kij9/5Mq/f+TKf3/kij9/5Io/f+QJP3/rFz9//fv////////6NH+/5o4/P+OIfz/jiH8/44h/P+bOv7/mzn+/5s4/v+aN/7/mjf+/5k2/v+ZNf7/mTT+/5gz/v+YM/7/lzL+/5Yw/v+aOP3/uHL+/+XM/v/9+////////9q1/v+XMf3/lCv9/5Mq/f+TKv3/kij9/5Yw/f/Ysv7///////36//++fv3/jyP8/5Aj/P+PI/z/jyL8/5w7/v+cOv7/mzn+/5s5/v+bOP7/mjf+/5o2/v+ZNv7/mTX+/5g0/v+YM/7/mDP+/5cx/v+WL/7/n0L9/8aO/v/o0f//3r3+/6BD/f+UK/3/lCz9/5Mq/f+TKv3/vn/+//v3////////3bv+/5g1/f+QJPz/k
CX8/5Ak/P+PI/z/nTz//5w7/v+cO/7/nDr+/5s5/v+bOP7/mjj+/5o3/v+ZNv7/mTX+/5k1/v+YNP7/mDP+/5cy/v+WMP7/lzH9/54//f+fQv3/mDP9/5Ut/f+ULP3/lCv9/7Np/f/z5////////+vX//+mT/3/kSb9/5En/f+RJv3/kSX8/5Ak/P+dPf/9nT3//508//+cO/7/nDr+/5s6/v+bOf7/mzj+/5o3/v+aN/7/mTb+/5k1/v+ZNP7/mDT+/5gz/v+XMv7/ljD+/5Yv/f+WL/3/lS39/5cx/f+5df7/8eT////////q1f7/rFv9/5Io/f+SKf3/kij9/5In/f+RJv3/kSb9/Z4//+6ePv//nT3//508//+cPP7/nDv+/5w6/v+bOf7/mzn+/5o4/v+aNv7/mTb+/5k1/v+ZNf7/mDT+/5gz/v+XMv7/li/+/5Yw/v+kSv3/zp/+//jy///9+///3r7+/6dR/f+TKv3/lCv9/5Mq/f+TKf3/kin9/5Io/f+RJ/3un0D/xZ4///+eP///nj7//509//+dPP//nDv+/5w7/v+bOv7/nTz+/6FF/v+gQ/7/nT7+/5w6/v+bOf7/nDv+/6FE/v+uX/7/ypb+/+7e///9+///7Nr//8OI/v+dPf3/lCz9/5Ut/f+ULP3/lCv9/5Mr/f+TKv3/kyn9/5Io/cWfQf9/n0H//J9A//+eP///nj7//549//+dPf//nTz+/5w7/v+cPP7/pUz+/7Vt/v/Dh/7/y5n+/8+g/v/Wr///4sf//+3b///v3///3r3//8CD/v+jSf7/ljD9/5Yv/f+WL/3/lS79/5Ut/f+ULf3/lCz9/5Qr/f+TKv38kyr9f6BC/yigQv/Qn0H//59A//+fQP//nj///54+//+dPf//nT3//508/v+cOv7/nTz+/6NI/v+oVP7/rV7+/7Bi/v+vYP7/qVX+/6NI/v+bOv7/lzH+/5cx/v+XMv7/lzH9/5Yw/f+WMP3/li/9/5Uu/f+VLf3/lCz9/5Qs/dCUK/0ooEH/AKBC/1WgQv/roEL//59B//+fQP//nz///54///+ePv//nT3//508//+cO/7/nDr+/5s4/v+aN/7/mjb+/5k2/v+ZNf7/mTX+/5k1/v+ZNf7/mDT+/5gz/v+YMv7/lzL+/5cx/v+WMP3/li/9/5Uu/f+VLv3slS39VZYt/QCgQv8AoEL/BKBC/1WgQv/QoEL//KBB//+fQf//n0D//54///+ePv//nj3//509//+dPP7/nDv+/5w7/v+bOv7/mzn+/5s4/v+aN/7/mjf+/5k2/v+ZNf7/mTT+/5g0/v+YM/7/lzL+/5cx/v+XMf38ljD905Yv/ViVLf4FlS7+AAAAAACgQv8AoEL/AKBC/yigQv+AoEL/xqBC/+6fQf/9n0D//59A//+eP///nj7//509//+dPP//nDz+/5w7/v+cOv7/mzn+/5s5/v+aOP7/mjf+/5o2/v+ZNf7/mTX+/5g0/v6YM/7vmDL+x5cx/oGXMf4ply3/AJcw/gAAAAAAwAAAA4AAAAGAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAABgAAAAcAAAAM='
| 5,724
| 5,724
| 0.682565
| 1,290
| 5,724
| 3.028682
| 0.368992
| 0.006143
| 0.00819
| 0.00819
| 0.15613
| 0.080625
| 0.052214
| 0.024571
| 0.014333
| 0.014333
| 0
| 0.160433
| 0.000349
| 5,724
| 1
| 5,724
| 5,724
| 0.52237
| 0
| 0
| 0
| 0
| 1
| 0.998428
| 0.998428
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e41b8d53f8ccade3c875cebce6d0a34b1824775c
| 132
|
py
|
Python
|
boa3_test/test_sc/interop_test/runtime/CheckWitnessMismatchedType.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 25
|
2020-07-22T19:37:43.000Z
|
2022-03-08T03:23:55.000Z
|
boa3_test/test_sc/interop_test/runtime/CheckWitnessMismatchedType.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 419
|
2020-04-23T17:48:14.000Z
|
2022-03-31T13:17:45.000Z
|
boa3_test/test_sc/interop_test/runtime/CheckWitnessMismatchedType.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 15
|
2020-05-21T21:54:24.000Z
|
2021-11-18T06:17:24.000Z
|
from boa3.builtin.interop.runtime import check_witness
def Main(script_hash: list) -> bool:
return check_witness(script_hash)
| 22
| 54
| 0.787879
| 19
| 132
| 5.263158
| 0.789474
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008696
| 0.128788
| 132
| 5
| 55
| 26.4
| 0.86087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
e485c7e23d44cb8ed2e557bc82c5c1be6ae39671
| 387
|
py
|
Python
|
datasette_query_history/__init__.py
|
bretwalker/datasette-query-history
|
c4d4cceb2a0b6f69dd96db1d7c1bf97194ad841a
|
[
"Apache-2.0"
] | 3
|
2021-01-17T05:08:00.000Z
|
2021-04-18T03:06:36.000Z
|
datasette_query_history/__init__.py
|
bretwalker/datasette-query-history
|
c4d4cceb2a0b6f69dd96db1d7c1bf97194ad841a
|
[
"Apache-2.0"
] | null | null | null |
datasette_query_history/__init__.py
|
bretwalker/datasette-query-history
|
c4d4cceb2a0b6f69dd96db1d7c1bf97194ad841a
|
[
"Apache-2.0"
] | null | null | null |
from datasette import hookimpl
@hookimpl
def extra_css_urls(database, table, columns, view_name, datasette):
return [
"/-/static-plugins/datasette_query_history/datasette-query-history.css",
]
@hookimpl
def extra_js_urls(database, table, columns, view_name, datasette):
return [
"/-/static-plugins/datasette_query_history/datasette-query-history.js",
]
| 27.642857
| 80
| 0.731266
| 46
| 387
| 5.934783
| 0.413043
| 0.205128
| 0.307692
| 0.175824
| 0.747253
| 0.747253
| 0.747253
| 0.747253
| 0.747253
| 0.747253
| 0
| 0
| 0.155039
| 387
| 13
| 81
| 29.769231
| 0.834862
| 0
| 0
| 0.363636
| 0
| 0
| 0.354005
| 0.354005
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0.090909
| 0.181818
| 0.454545
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
e49b73695e8a8e6fe65244b5521604e1f4222b7a
| 66
|
py
|
Python
|
build/lib/brawlstars_api/__init__.py
|
smlbiobot/brawlstars_api
|
f40cc3e82bbcbe384dfccd33eda42d3e2331b81f
|
[
"MIT"
] | null | null | null |
build/lib/brawlstars_api/__init__.py
|
smlbiobot/brawlstars_api
|
f40cc3e82bbcbe384dfccd33eda42d3e2331b81f
|
[
"MIT"
] | null | null | null |
build/lib/brawlstars_api/__init__.py
|
smlbiobot/brawlstars_api
|
f40cc3e82bbcbe384dfccd33eda42d3e2331b81f
|
[
"MIT"
] | null | null | null |
from .api import BrawlStarsAPI
from .api import BrawlStarsAPIError
| 33
| 35
| 0.863636
| 8
| 66
| 7.125
| 0.625
| 0.245614
| 0.45614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106061
| 66
| 2
| 35
| 33
| 0.966102
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e4b846d4e88a4a01e387646854c63a46fff18f8c
| 198
|
py
|
Python
|
python/dagster-fusion/tests/test_imports_dagster.py
|
roeap/flight-fusion
|
14f73c99c5214277d0abcced633d83b37f1d5292
|
[
"Apache-2.0",
"MIT"
] | 5
|
2021-12-24T06:21:40.000Z
|
2022-01-16T12:21:06.000Z
|
python/dagster-fusion/tests/test_imports_dagster.py
|
roeap/flight-fusion
|
14f73c99c5214277d0abcced633d83b37f1d5292
|
[
"Apache-2.0",
"MIT"
] | 66
|
2021-12-15T17:08:21.000Z
|
2022-03-29T10:36:18.000Z
|
python/dagster-fusion/tests/test_imports_dagster.py
|
roeap/flight-fusion
|
14f73c99c5214277d0abcced633d83b37f1d5292
|
[
"Apache-2.0",
"MIT"
] | 1
|
2022-02-08T21:07:08.000Z
|
2022-02-08T21:07:08.000Z
|
import dagster_fusion
def test_import_dagster_fusion():
assert dagster_fusion.__name__ == "dagster_fusion"
def test_dagster_fusion_version():
assert dagster_fusion.__version__ > "0.0.0"
| 19.8
| 54
| 0.782828
| 26
| 198
| 5.269231
| 0.346154
| 0.569343
| 0.277372
| 0.291971
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017442
| 0.131313
| 198
| 9
| 55
| 22
| 0.77907
| 0
| 0
| 0
| 0
| 0
| 0.09596
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
9018c9ec1895f65c68abc2794f13646d7757d496
| 68,562
|
py
|
Python
|
benchmarks/SimResults/micro_pinned_train_combos/cmpA_povraygromacslibquantumbzip2/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/micro_pinned_train_combos/cmpA_povraygromacslibquantumbzip2/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/micro_pinned_train_combos/cmpA_povraygromacslibquantumbzip2/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.31626,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.451093,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.81823,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.759535,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.31524,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.754327,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.8291,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.472009,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 9.01815,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.343502,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0275337,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.313021,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.203629,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.656523,
'Execution Unit/Register Files/Runtime Dynamic': 0.231163,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.84303,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.88478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 5.80152,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0011616,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0011616,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00101462,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000394344,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00292514,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00626296,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0110348,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.195754,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.402603,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.664867,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.28052,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0693417,
'L2/Runtime Dynamic': 0.0136607,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.69495,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.62506,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.176574,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.176574,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.53217,
'Load Store Unit/Runtime Dynamic': 3.67243,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.435401,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.870803,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.154525,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.155561,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0660188,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.825638,
'Memory Management Unit/Runtime Dynamic': 0.221579,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 30.9757,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.1984,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0532591,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.373546,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.62521,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 12.6149,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0495849,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.241635,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.261666,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.216426,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.349086,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.176207,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.741719,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.207411,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.69332,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0494344,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00907786,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0844564,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0671363,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.133891,
'Execution Unit/Register Files/Runtime Dynamic': 0.0762142,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.190325,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.469552,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.9345,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00193701,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00193701,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00172701,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000690364,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000964419,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00656545,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0171471,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0645399,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.10529,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.220422,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.219206,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.52304,
'Instruction Fetch Unit/Runtime Dynamic': 0.52788,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0687005,
'L2/Runtime Dynamic': 0.0149604,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.61072,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.1518,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0767916,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0767917,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.97335,
'Load Store Unit/Runtime Dynamic': 1.6073,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.189355,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.37871,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0672027,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0679998,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.255252,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0368303,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.526803,
'Memory Management Unit/Runtime Dynamic': 0.10483,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.3747,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.130039,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0113471,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.107479,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.248866,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.43833,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0619241,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0998813,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0504167,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.212222,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0708235,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 3.9613,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00259738,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0187824,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0192092,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0187824,
'Execution Unit/Register Files/Runtime Dynamic': 0.0218066,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0395692,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.111314,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.953409,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00062807,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00062807,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000552209,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000216592,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000275942,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00208429,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0058375,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0184663,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.17461,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0835913,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0627198,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.45013,
'Instruction Fetch Unit/Runtime Dynamic': 0.172699,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.030093,
'L2/Runtime Dynamic': 0.0091679,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.55146,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.165561,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0101696,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0101696,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.59948,
'Load Store Unit/Runtime Dynamic': 0.225883,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0250765,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.050153,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.00889971,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.00935155,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.073033,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0137039,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.24443,
'Memory Management Unit/Runtime Dynamic': 0.0230554,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 12.8749,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00279385,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0318851,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0346789,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.41889,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.244148,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.393802,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.198778,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.836729,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.279235,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.36938,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0102407,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0740531,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0757361,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0740531,
'Execution Unit/Register Files/Runtime Dynamic': 0.0859768,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.156009,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.467161,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.99793,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00201571,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00201571,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00179321,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00071471,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00108796,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00691258,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0179854,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0728071,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.63115,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.243379,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.247286,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.07443,
'Instruction Fetch Unit/Runtime Dynamic': 0.58837,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0567864,
'L2/Runtime Dynamic': 0.0161194,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.66987,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.19043,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0787053,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0787054,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.04154,
'Load Store Unit/Runtime Dynamic': 1.65728,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.194074,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.388148,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0688775,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0697277,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.287948,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0399064,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.562376,
'Memory Management Unit/Runtime Dynamic': 0.109634,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.694,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0110153,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.127029,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.138044,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.50738,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 1.9369935276373202,
'Runtime Dynamic': 1.9369935276373202,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.161329,
'Runtime Dynamic': 0.0792786,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 83.0806,
'Peak Power': 116.193,
'Runtime Dynamic': 23.0588,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 82.9193,
'Total Cores/Runtime Dynamic': 22.9795,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.161329,
'Total L3s/Runtime Dynamic': 0.0792786,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.013129
| 124
| 0.681865
| 8,082
| 68,562
| 5.77852
| 0.066691
| 0.123678
| 0.113057
| 0.093529
| 0.94026
| 0.931652
| 0.919597
| 0.889812
| 0.863432
| 0.845146
| 0
| 0.131181
| 0.224483
| 68,562
| 914
| 125
| 75.013129
| 0.747155
| 0
| 0
| 0.653173
| 0
| 0
| 0.657862
| 0.048131
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5f8b31ef3f41a62c6fd76fbfc5a97fac2028199a
| 59,490
|
py
|
Python
|
sdk/python/pulumi_consul/_inputs.py
|
pulumi/pulumi-consul
|
5b66c5b97fda6b5433bfb4d4173c999e468c82e8
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2019-11-12T12:21:18.000Z
|
2021-07-31T08:17:22.000Z
|
sdk/python/pulumi_consul/_inputs.py
|
pulumi/pulumi-consul
|
5b66c5b97fda6b5433bfb4d4173c999e468c82e8
|
[
"ECL-2.0",
"Apache-2.0"
] | 38
|
2019-11-21T15:19:33.000Z
|
2022-03-31T15:24:11.000Z
|
sdk/python/pulumi_consul/_inputs.py
|
pulumi/pulumi-consul
|
5b66c5b97fda6b5433bfb4d4173c999e468c82e8
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-11-24T12:23:13.000Z
|
2021-12-06T17:33:31.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
# Public API of this generated module: every input-argument class exported
# for use by the provider's resource and data-source wrappers.
__all__ = [
    'AclAuthMethodNamespaceRuleArgs',
    'AclRoleServiceIdentityArgs',
    'CatalogEntryServiceArgs',
    'KeyPrefixSubkeyCollectionArgs',
    'KeysKeyArgs',
    'PreparedQueryDnsArgs',
    'PreparedQueryFailoverArgs',
    'PreparedQueryTemplateArgs',
    'ProviderHeaderArgs',
    'ServiceCheckArgs',
    'ServiceCheckHeaderArgs',
    'GetAclRolePolicyArgs',
    'GetAclRoleServiceIdentityArgs',
    'GetAclTokenPolicyArgs',
    'GetCatalogNodesQueryOptionArgs',
    'GetCatalogServiceQueryOptionArgs',
    'GetCatalogServicesQueryOptionArgs',
    'GetKeyPrefixSubkeyCollectionArgs',
    'GetKeysKeyArgs',
    'GetNodesQueryOptionArgs',
    'GetServiceQueryOptionArgs',
    'GetServicesQueryOptionArgs',
]
@pulumi.input_type
class AclAuthMethodNamespaceRuleArgs:
    """Input arguments for a namespace rule attached to an ACL auth method."""

    def __init__(__self__, *,
                 bind_namespace: pulumi.Input[str],
                 selector: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] bind_namespace: If the namespace rule's `selector` matches then
               this is used to control the namespace where the token is created.
        :param pulumi.Input[str] selector: Specifies the expression used to match this namespace
               rule against valid identities returned from an auth method validation.
               Defaults to `""`.
        """
        pulumi.set(__self__, "bind_namespace", bind_namespace)
        # Optional field: only stored when explicitly provided.
        if selector is not None:
            pulumi.set(__self__, "selector", selector)

    @property
    @pulumi.getter(name="bindNamespace")
    def bind_namespace(self) -> pulumi.Input[str]:
        """
        If the namespace rule's `selector` matches then
        this is used to control the namespace where the token is created.
        """
        return pulumi.get(self, "bind_namespace")

    @bind_namespace.setter
    def bind_namespace(self, value: pulumi.Input[str]):
        pulumi.set(self, "bind_namespace", value)

    @property
    @pulumi.getter
    def selector(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the expression used to match this namespace
        rule against valid identities returned from an auth method validation.
        Defaults to `""`.
        """
        return pulumi.get(self, "selector")

    @selector.setter
    def selector(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "selector", value)
@pulumi.input_type
class AclRoleServiceIdentityArgs:
    """Input arguments for a service identity attached to an ACL role."""

    def __init__(__self__, *,
                 service_name: pulumi.Input[str],
                 datacenters: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] service_name: The name of the service.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] datacenters: The datacenters the effective policy is valid within.
        """
        pulumi.set(__self__, "service_name", service_name)
        # Optional field: only stored when explicitly provided.
        if datacenters is not None:
            pulumi.set(__self__, "datacenters", datacenters)

    @property
    @pulumi.getter(name="serviceName")
    def service_name(self) -> pulumi.Input[str]:
        """
        The name of the service.
        """
        return pulumi.get(self, "service_name")

    @service_name.setter
    def service_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "service_name", value)

    @property
    @pulumi.getter
    def datacenters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The datacenters the effective policy is valid within.
        """
        return pulumi.get(self, "datacenters")

    @datacenters.setter
    def datacenters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "datacenters", value)
@pulumi.input_type
class CatalogEntryServiceArgs:
    """Input arguments describing a service registered via a catalog entry."""

    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 address: Optional[pulumi.Input[str]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 port: Optional[pulumi.Input[int]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] name: The name of the service
        :param pulumi.Input[str] address: The address of the service. Defaults to the
               node address.
        :param pulumi.Input[str] id: The ID of the service. Defaults to the `name`.
        :param pulumi.Input[int] port: The port of the service.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of values that are opaque to Consul,
               but can be used to distinguish between services or nodes.
        """
        pulumi.set(__self__, "name", name)
        # Optional fields: only stored when explicitly provided.
        if address is not None:
            pulumi.set(__self__, "address", address)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The name of the service
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def address(self) -> Optional[pulumi.Input[str]]:
        """
        The address of the service. Defaults to the
        node address.
        """
        return pulumi.get(self, "address")

    @address.setter
    def address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "address", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the service. Defaults to the `name`.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """
        The port of the service.
        """
        return pulumi.get(self, "port")

    @port.setter
    def port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of values that are opaque to Consul,
        but can be used to distinguish between services or nodes.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class KeyPrefixSubkeyCollectionArgs:
    """Input arguments for one subkey written under a KV key prefix."""

    def __init__(__self__, *,
                 path: pulumi.Input[str],
                 value: pulumi.Input[str],
                 flags: Optional[pulumi.Input[int]] = None):
        """
        :param pulumi.Input[str] path: This is the path (which will be appended to the given
               `path_prefix`) in Consul that should be written to.
        :param pulumi.Input[str] value: The value to write to the given path.
        :param pulumi.Input[int] flags: An [unsigned integer value](https://www.consul.io/api/kv.html#flags-1)
               to attach to the key (defaults to 0).
        """
        pulumi.set(__self__, "path", path)
        pulumi.set(__self__, "value", value)
        # Optional field: only stored when explicitly provided.
        if flags is not None:
            pulumi.set(__self__, "flags", flags)

    @property
    @pulumi.getter
    def path(self) -> pulumi.Input[str]:
        """
        This is the path (which will be appended to the given
        `path_prefix`) in Consul that should be written to.
        """
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: pulumi.Input[str]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[str]:
        """
        The value to write to the given path.
        """
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: pulumi.Input[str]):
        pulumi.set(self, "value", value)

    @property
    @pulumi.getter
    def flags(self) -> Optional[pulumi.Input[int]]:
        """
        An [unsigned integer value](https://www.consul.io/api/kv.html#flags-1)
        to attach to the key (defaults to 0).
        """
        return pulumi.get(self, "flags")

    @flags.setter
    def flags(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "flags", value)
@pulumi.input_type
class KeysKeyArgs:
    """Input arguments for a single KV key managed by the `consul_keys` resource."""

    def __init__(__self__, *,
                 path: pulumi.Input[str],
                 default: Optional[pulumi.Input[str]] = None,
                 delete: Optional[pulumi.Input[bool]] = None,
                 flags: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 value: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] path: This is the path in Consul that should be written to.
        :param pulumi.Input[str] default: Fallback value; presumably returned when the key is
               absent when reading — TODO confirm against provider docs.
        :param pulumi.Input[bool] delete: If true, then the key will be deleted when
               either its configuration block is removed from the configuration or
               the entire resource is destroyed. Otherwise, it will be left in Consul.
               Defaults to false.
        :param pulumi.Input[int] flags: An [unsigned integer value](https://www.consul.io/api/kv.html#flags-1)
               to attach to the key (defaults to 0).
        :param pulumi.Input[str] name: Deprecated — using the `consul_keys` resource to *read*
               is deprecated; use the `consul_keys` data source instead (see warning below).
        :param pulumi.Input[str] value: The value to write to the given path.
        """
        pulumi.set(__self__, "path", path)
        # Optional fields: only stored when explicitly provided.
        if default is not None:
            pulumi.set(__self__, "default", default)
        if delete is not None:
            pulumi.set(__self__, "delete", delete)
        if flags is not None:
            pulumi.set(__self__, "flags", flags)
        # `name` is deprecated: emit both a Python warning and a Pulumi log warning,
        # but still store the value for backwards compatibility.
        if name is not None:
            warnings.warn("""Using consul_keys resource to *read* is deprecated; please use consul_keys data source instead""", DeprecationWarning)
            pulumi.log.warn("""name is deprecated: Using consul_keys resource to *read* is deprecated; please use consul_keys data source instead""")
        if name is not None:
            pulumi.set(__self__, "name", name)
        if value is not None:
            pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def path(self) -> pulumi.Input[str]:
        """
        This is the path in Consul that should be written to.
        """
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: pulumi.Input[str]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def default(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "default")

    @default.setter
    def default(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "default", value)

    @property
    @pulumi.getter
    def delete(self) -> Optional[pulumi.Input[bool]]:
        """
        If true, then the key will be deleted when
        either its configuration block is removed from the configuration or
        the entire resource is destroyed. Otherwise, it will be left in Consul.
        Defaults to false.
        """
        return pulumi.get(self, "delete")

    @delete.setter
    def delete(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "delete", value)

    @property
    @pulumi.getter
    def flags(self) -> Optional[pulumi.Input[int]]:
        """
        An [unsigned integer value](https://www.consul.io/api/kv.html#flags-1)
        to attach to the key (defaults to 0).
        """
        return pulumi.get(self, "flags")

    @flags.setter
    def flags(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "flags", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        # Deprecated field: see the DeprecationWarning emitted in __init__.
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def value(self) -> Optional[pulumi.Input[str]]:
        """
        The value to write to the given path.
        """
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "value", value)
@pulumi.input_type
class PreparedQueryDnsArgs:
    """Input arguments for the DNS configuration of a prepared query."""

    def __init__(__self__, *,
                 ttl: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] ttl: The TTL to send when returning DNS results.
        """
        # Optional field: only stored when explicitly provided.
        if ttl is not None:
            pulumi.set(__self__, "ttl", ttl)

    @property
    @pulumi.getter
    def ttl(self) -> Optional[pulumi.Input[str]]:
        """
        The TTL to send when returning DNS results.
        """
        return pulumi.get(self, "ttl")

    @ttl.setter
    def ttl(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ttl", value)
@pulumi.input_type
class PreparedQueryFailoverArgs:
    """Input arguments for the failover configuration of a prepared query."""

    def __init__(__self__, *,
                 datacenters: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 nearest_n: Optional[pulumi.Input[int]] = None):
        """
        :param pulumi.Input[Sequence[pulumi.Input[str]]] datacenters: Remote datacenters to return results from.
        :param pulumi.Input[int] nearest_n: Return results from this many datacenters,
               sorted in ascending order of estimated RTT.
        """
        # Optional fields: only stored when explicitly provided.
        if datacenters is not None:
            pulumi.set(__self__, "datacenters", datacenters)
        if nearest_n is not None:
            pulumi.set(__self__, "nearest_n", nearest_n)

    @property
    @pulumi.getter
    def datacenters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Remote datacenters to return results from.
        """
        return pulumi.get(self, "datacenters")

    @datacenters.setter
    def datacenters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "datacenters", value)

    @property
    @pulumi.getter(name="nearestN")
    def nearest_n(self) -> Optional[pulumi.Input[int]]:
        """
        Return results from this many datacenters,
        sorted in ascending order of estimated RTT.
        """
        return pulumi.get(self, "nearest_n")

    @nearest_n.setter
    def nearest_n(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "nearest_n", value)
@pulumi.input_type
class PreparedQueryTemplateArgs:
    """Input arguments for the template configuration of a prepared query."""

    def __init__(__self__, *,
                 regexp: pulumi.Input[str],
                 type: pulumi.Input[str]):
        """
        :param pulumi.Input[str] regexp: The regular expression to match with. When using
               `name_prefix_match`, this regex is applied against the query name.
        :param pulumi.Input[str] type: The type of template matching to perform. Currently
               only `name_prefix_match` is supported.
        """
        pulumi.set(__self__, "regexp", regexp)
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def regexp(self) -> pulumi.Input[str]:
        """
        The regular expression to match with. When using
        `name_prefix_match`, this regex is applied against the query name.
        """
        return pulumi.get(self, "regexp")

    @regexp.setter
    def regexp(self, value: pulumi.Input[str]):
        pulumi.set(self, "regexp", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        The type of template matching to perform. Currently
        only `name_prefix_match` is supported.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class ProviderHeaderArgs:
    """A name/value pair configured on the provider.

    NOTE(review): presumably an extra HTTP header the provider sends with
    requests to the Consul API — confirm against the provider schema.
    """

    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 value: pulumi.Input[str]):
        """
        :param pulumi.Input[str] name: The name of the header.
        :param pulumi.Input[str] value: The value of the header.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[str]:
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: pulumi.Input[str]):
        pulumi.set(self, "value", value)
@pulumi.input_type
class ServiceCheckArgs:
    """Input arguments for a health check attached to a service registration."""

    def __init__(__self__, *,
                 check_id: pulumi.Input[str],
                 interval: pulumi.Input[str],
                 name: pulumi.Input[str],
                 timeout: pulumi.Input[str],
                 deregister_critical_service_after: Optional[pulumi.Input[str]] = None,
                 headers: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCheckHeaderArgs']]]] = None,
                 http: Optional[pulumi.Input[str]] = None,
                 method: Optional[pulumi.Input[str]] = None,
                 notes: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 tcp: Optional[pulumi.Input[str]] = None,
                 tls_skip_verify: Optional[pulumi.Input[bool]] = None):
        """
        :param pulumi.Input[str] check_id: An ID, *unique per agent*. Will default to *name*
               if not set.
        :param pulumi.Input[str] interval: The interval to wait between each health-check
               invocation.
        :param pulumi.Input[str] name: The name of the health-check.
        :param pulumi.Input[str] timeout: The timeout value for HTTP checks.
        :param pulumi.Input[str] deregister_critical_service_after: The time after which
               the service is automatically deregistered when in the `critical` state.
               Defaults to `30s`.
        :param pulumi.Input[Sequence[pulumi.Input['ServiceCheckHeaderArgs']]] headers: The headers to send for an HTTP check.
               The attributes of each header is given below.
        :param pulumi.Input[str] http: The HTTP endpoint to call for an HTTP check.
        :param pulumi.Input[str] method: The method to use for HTTP health-checks. Defaults
               to `GET`.
        :param pulumi.Input[str] notes: An opaque field meant to hold human readable text.
        :param pulumi.Input[str] status: The initial health-check status.
        :param pulumi.Input[str] tcp: The TCP address and port to connect to for a TCP check.
        :param pulumi.Input[bool] tls_skip_verify: Whether to deactivate certificate
               verification for HTTP health-checks. Defaults to `false`.
        """
        pulumi.set(__self__, "check_id", check_id)
        pulumi.set(__self__, "interval", interval)
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "timeout", timeout)
        # Optional fields: only stored when explicitly provided.
        if deregister_critical_service_after is not None:
            pulumi.set(__self__, "deregister_critical_service_after", deregister_critical_service_after)
        if headers is not None:
            pulumi.set(__self__, "headers", headers)
        if http is not None:
            pulumi.set(__self__, "http", http)
        if method is not None:
            pulumi.set(__self__, "method", method)
        if notes is not None:
            pulumi.set(__self__, "notes", notes)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if tcp is not None:
            pulumi.set(__self__, "tcp", tcp)
        if tls_skip_verify is not None:
            pulumi.set(__self__, "tls_skip_verify", tls_skip_verify)

    @property
    @pulumi.getter(name="checkId")
    def check_id(self) -> pulumi.Input[str]:
        """
        An ID, *unique per agent*. Will default to *name*
        if not set.
        """
        return pulumi.get(self, "check_id")

    @check_id.setter
    def check_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "check_id", value)

    @property
    @pulumi.getter
    def interval(self) -> pulumi.Input[str]:
        """
        The interval to wait between each health-check
        invocation.
        """
        return pulumi.get(self, "interval")

    @interval.setter
    def interval(self, value: pulumi.Input[str]):
        pulumi.set(self, "interval", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The name of the health-check.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def timeout(self) -> pulumi.Input[str]:
        """
        The timeout value for HTTP checks.
        """
        return pulumi.get(self, "timeout")

    @timeout.setter
    def timeout(self, value: pulumi.Input[str]):
        pulumi.set(self, "timeout", value)

    @property
    @pulumi.getter(name="deregisterCriticalServiceAfter")
    def deregister_critical_service_after(self) -> Optional[pulumi.Input[str]]:
        """
        The time after which
        the service is automatically deregistered when in the `critical` state.
        Defaults to `30s`.
        """
        return pulumi.get(self, "deregister_critical_service_after")

    @deregister_critical_service_after.setter
    def deregister_critical_service_after(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "deregister_critical_service_after", value)

    @property
    @pulumi.getter
    def headers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCheckHeaderArgs']]]]:
        """
        The headers to send for an HTTP check.
        The attributes of each header is given below.
        """
        return pulumi.get(self, "headers")

    @headers.setter
    def headers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCheckHeaderArgs']]]]):
        pulumi.set(self, "headers", value)

    @property
    @pulumi.getter
    def http(self) -> Optional[pulumi.Input[str]]:
        """
        The HTTP endpoint to call for an HTTP check.
        """
        return pulumi.get(self, "http")

    @http.setter
    def http(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "http", value)

    @property
    @pulumi.getter
    def method(self) -> Optional[pulumi.Input[str]]:
        """
        The method to use for HTTP health-checks. Defaults
        to `GET`.
        """
        return pulumi.get(self, "method")

    @method.setter
    def method(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "method", value)

    @property
    @pulumi.getter
    def notes(self) -> Optional[pulumi.Input[str]]:
        """
        An opaque field meant to hold human readable text.
        """
        return pulumi.get(self, "notes")

    @notes.setter
    def notes(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "notes", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        The initial health-check status.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def tcp(self) -> Optional[pulumi.Input[str]]:
        """
        The TCP address and port to connect to for a TCP check.
        """
        return pulumi.get(self, "tcp")

    @tcp.setter
    def tcp(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tcp", value)

    @property
    @pulumi.getter(name="tlsSkipVerify")
    def tls_skip_verify(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to deactivate certificate
        verification for HTTP health-checks. Defaults to `false`.
        """
        return pulumi.get(self, "tls_skip_verify")

    @tls_skip_verify.setter
    def tls_skip_verify(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "tls_skip_verify", value)
@pulumi.input_type
class ServiceCheckHeaderArgs:
    """Input arguments for one HTTP header sent with a service health check."""

    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 values: pulumi.Input[Sequence[pulumi.Input[str]]]):
        """
        :param pulumi.Input[str] name: The name of the header.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] values: The header's list of values.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The name of the header.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        The header's list of values.
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetAclRolePolicyArgs:
    """Input arguments for a policy link on the ACL role data source."""

    def __init__(__self__, *,
                 id: str,
                 name: str):
        """
        :param str id: The ID of the policy — presumably the linked ACL
               policy's ID; confirm against the provider schema.
        :param str name: The name of the ACL Role.
        """
        pulumi.set(__self__, "id", id)
        pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def id(self) -> str:
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: str):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the ACL Role.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)
@pulumi.input_type
class GetAclRoleServiceIdentityArgs:
    """Input arguments for a service identity on the ACL role data source.

    Unlike the resource-side ``AclRoleServiceIdentityArgs``, plain (non-Input)
    types are used here and both fields are optional.
    """

    def __init__(__self__, *,
                 datacenters: Optional[Sequence[str]] = None,
                 service_name: Optional[str] = None):
        """
        :param Sequence[str] datacenters: The datacenters the identity applies to.
        :param str service_name: The name of the service.
        """
        # Optional fields: only stored when explicitly provided.
        if datacenters is not None:
            pulumi.set(__self__, "datacenters", datacenters)
        if service_name is not None:
            pulumi.set(__self__, "service_name", service_name)

    @property
    @pulumi.getter
    def datacenters(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "datacenters")

    @datacenters.setter
    def datacenters(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "datacenters", value)

    @property
    @pulumi.getter(name="serviceName")
    def service_name(self) -> Optional[str]:
        return pulumi.get(self, "service_name")

    @service_name.setter
    def service_name(self, value: Optional[str]):
        pulumi.set(self, "service_name", value)
@pulumi.input_type
class GetAclTokenPolicyArgs:
    """Input arguments for a policy link on the ACL token data source."""

    def __init__(__self__, *,
                 id: str,
                 name: str):
        """
        :param str id: The ID of the policy.
        :param str name: The name of the policy.
        """
        pulumi.set(__self__, "id", id)
        pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def id(self) -> str:
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: str):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def name(self) -> str:
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)
@pulumi.input_type
class GetCatalogNodesQueryOptionArgs:
    """Query options for the catalog-nodes data source.

    NOTE(review): fields presumably map one-to-one onto Consul API query
    parameters (stale/consistent reads, datacenter, `near`, node metadata
    filtering, ACL token, blocking-query index/wait) — confirm against the
    Consul catalog API documentation.
    """

    def __init__(__self__, *,
                 allow_stale: Optional[bool] = None,
                 datacenter: Optional[str] = None,
                 near: Optional[str] = None,
                 node_meta: Optional[Mapping[str, str]] = None,
                 require_consistent: Optional[bool] = None,
                 token: Optional[str] = None,
                 wait_index: Optional[int] = None,
                 wait_time: Optional[str] = None):
        # All fields are optional: only stored when explicitly provided.
        if allow_stale is not None:
            pulumi.set(__self__, "allow_stale", allow_stale)
        if datacenter is not None:
            pulumi.set(__self__, "datacenter", datacenter)
        if near is not None:
            pulumi.set(__self__, "near", near)
        if node_meta is not None:
            pulumi.set(__self__, "node_meta", node_meta)
        if require_consistent is not None:
            pulumi.set(__self__, "require_consistent", require_consistent)
        if token is not None:
            pulumi.set(__self__, "token", token)
        if wait_index is not None:
            pulumi.set(__self__, "wait_index", wait_index)
        if wait_time is not None:
            pulumi.set(__self__, "wait_time", wait_time)

    @property
    @pulumi.getter(name="allowStale")
    def allow_stale(self) -> Optional[bool]:
        return pulumi.get(self, "allow_stale")

    @allow_stale.setter
    def allow_stale(self, value: Optional[bool]):
        pulumi.set(self, "allow_stale", value)

    @property
    @pulumi.getter
    def datacenter(self) -> Optional[str]:
        return pulumi.get(self, "datacenter")

    @datacenter.setter
    def datacenter(self, value: Optional[str]):
        pulumi.set(self, "datacenter", value)

    @property
    @pulumi.getter
    def near(self) -> Optional[str]:
        return pulumi.get(self, "near")

    @near.setter
    def near(self, value: Optional[str]):
        pulumi.set(self, "near", value)

    @property
    @pulumi.getter(name="nodeMeta")
    def node_meta(self) -> Optional[Mapping[str, str]]:
        return pulumi.get(self, "node_meta")

    @node_meta.setter
    def node_meta(self, value: Optional[Mapping[str, str]]):
        pulumi.set(self, "node_meta", value)

    @property
    @pulumi.getter(name="requireConsistent")
    def require_consistent(self) -> Optional[bool]:
        return pulumi.get(self, "require_consistent")

    @require_consistent.setter
    def require_consistent(self, value: Optional[bool]):
        pulumi.set(self, "require_consistent", value)

    @property
    @pulumi.getter
    def token(self) -> Optional[str]:
        return pulumi.get(self, "token")

    @token.setter
    def token(self, value: Optional[str]):
        pulumi.set(self, "token", value)

    @property
    @pulumi.getter(name="waitIndex")
    def wait_index(self) -> Optional[int]:
        return pulumi.get(self, "wait_index")

    @wait_index.setter
    def wait_index(self, value: Optional[int]):
        pulumi.set(self, "wait_index", value)

    @property
    @pulumi.getter(name="waitTime")
    def wait_time(self) -> Optional[str]:
        return pulumi.get(self, "wait_time")

    @wait_time.setter
    def wait_time(self, value: Optional[str]):
        pulumi.set(self, "wait_time", value)
@pulumi.input_type
class GetCatalogServiceQueryOptionArgs:
    """Query options for the catalog-service data source.

    Same shape as ``GetCatalogNodesQueryOptionArgs`` plus a ``namespace``
    field. NOTE(review): fields presumably map onto Consul API query
    parameters — confirm against the Consul catalog API documentation.
    """

    def __init__(__self__, *,
                 allow_stale: Optional[bool] = None,
                 datacenter: Optional[str] = None,
                 namespace: Optional[str] = None,
                 near: Optional[str] = None,
                 node_meta: Optional[Mapping[str, str]] = None,
                 require_consistent: Optional[bool] = None,
                 token: Optional[str] = None,
                 wait_index: Optional[int] = None,
                 wait_time: Optional[str] = None):
        # All fields are optional: only stored when explicitly provided.
        if allow_stale is not None:
            pulumi.set(__self__, "allow_stale", allow_stale)
        if datacenter is not None:
            pulumi.set(__self__, "datacenter", datacenter)
        if namespace is not None:
            pulumi.set(__self__, "namespace", namespace)
        if near is not None:
            pulumi.set(__self__, "near", near)
        if node_meta is not None:
            pulumi.set(__self__, "node_meta", node_meta)
        if require_consistent is not None:
            pulumi.set(__self__, "require_consistent", require_consistent)
        if token is not None:
            pulumi.set(__self__, "token", token)
        if wait_index is not None:
            pulumi.set(__self__, "wait_index", wait_index)
        if wait_time is not None:
            pulumi.set(__self__, "wait_time", wait_time)

    @property
    @pulumi.getter(name="allowStale")
    def allow_stale(self) -> Optional[bool]:
        return pulumi.get(self, "allow_stale")

    @allow_stale.setter
    def allow_stale(self, value: Optional[bool]):
        pulumi.set(self, "allow_stale", value)

    @property
    @pulumi.getter
    def datacenter(self) -> Optional[str]:
        return pulumi.get(self, "datacenter")

    @datacenter.setter
    def datacenter(self, value: Optional[str]):
        pulumi.set(self, "datacenter", value)

    @property
    @pulumi.getter
    def namespace(self) -> Optional[str]:
        return pulumi.get(self, "namespace")

    @namespace.setter
    def namespace(self, value: Optional[str]):
        pulumi.set(self, "namespace", value)

    @property
    @pulumi.getter
    def near(self) -> Optional[str]:
        return pulumi.get(self, "near")

    @near.setter
    def near(self, value: Optional[str]):
        pulumi.set(self, "near", value)

    @property
    @pulumi.getter(name="nodeMeta")
    def node_meta(self) -> Optional[Mapping[str, str]]:
        return pulumi.get(self, "node_meta")

    @node_meta.setter
    def node_meta(self, value: Optional[Mapping[str, str]]):
        pulumi.set(self, "node_meta", value)

    @property
    @pulumi.getter(name="requireConsistent")
    def require_consistent(self) -> Optional[bool]:
        return pulumi.get(self, "require_consistent")

    @require_consistent.setter
    def require_consistent(self, value: Optional[bool]):
        pulumi.set(self, "require_consistent", value)

    @property
    @pulumi.getter
    def token(self) -> Optional[str]:
        return pulumi.get(self, "token")

    @token.setter
    def token(self, value: Optional[str]):
        pulumi.set(self, "token", value)

    @property
    @pulumi.getter(name="waitIndex")
    def wait_index(self) -> Optional[int]:
        return pulumi.get(self, "wait_index")

    @wait_index.setter
    def wait_index(self, value: Optional[int]):
        pulumi.set(self, "wait_index", value)

    @property
    @pulumi.getter(name="waitTime")
    def wait_time(self) -> Optional[str]:
        return pulumi.get(self, "wait_time")

    @wait_time.setter
    def wait_time(self, value: Optional[str]):
        pulumi.set(self, "wait_time", value)
@pulumi.input_type
class GetCatalogServicesQueryOptionArgs:
    """
    Query options applied when listing services from the Consul catalog.
    Every field is optional; fields left unset are simply not forwarded
    (see `__init__`). Field semantics mirror the documented sibling
    `GetServicesQueryOptionArgs` class in this file.
    """
    def __init__(__self__, *,
                 allow_stale: Optional[bool] = None,
                 datacenter: Optional[str] = None,
                 namespace: Optional[str] = None,
                 near: Optional[str] = None,
                 node_meta: Optional[Mapping[str, str]] = None,
                 require_consistent: Optional[bool] = None,
                 token: Optional[str] = None,
                 wait_index: Optional[int] = None,
                 wait_time: Optional[str] = None):
        """
        :param bool allow_stale: When `true`, the default, allow responses from
               Consul servers that are followers.
        :param str datacenter: The Consul datacenter to query.
        :param str namespace: The namespace to lookup the services.
        :param bool require_consistent: When `true` force the client to perform a
               read on at least quorum servers and verify the result is the same.
               Defaults to `false`.
        :param str token: Specify the Consul ACL token to use when performing the
               request. This defaults to the same API token configured by the
               `consul` provider but may be overridden if necessary.
        :param int wait_index: Index number used to enable blocking queries.
        :param str wait_time: Max time the client should wait for a blocking query
               to return.
        """
        # Only forward values the caller actually supplied so unset options
        # are omitted from the underlying query entirely.
        if allow_stale is not None:
            pulumi.set(__self__, "allow_stale", allow_stale)
        if datacenter is not None:
            pulumi.set(__self__, "datacenter", datacenter)
        if namespace is not None:
            pulumi.set(__self__, "namespace", namespace)
        if near is not None:
            pulumi.set(__self__, "near", near)
        if node_meta is not None:
            pulumi.set(__self__, "node_meta", node_meta)
        if require_consistent is not None:
            pulumi.set(__self__, "require_consistent", require_consistent)
        if token is not None:
            pulumi.set(__self__, "token", token)
        if wait_index is not None:
            pulumi.set(__self__, "wait_index", wait_index)
        if wait_time is not None:
            pulumi.set(__self__, "wait_time", wait_time)

    @property
    @pulumi.getter(name="allowStale")
    def allow_stale(self) -> Optional[bool]:
        """
        When `true`, the default, allow responses from
        Consul servers that are followers.
        """
        return pulumi.get(self, "allow_stale")

    @allow_stale.setter
    def allow_stale(self, value: Optional[bool]):
        pulumi.set(self, "allow_stale", value)

    @property
    @pulumi.getter
    def datacenter(self) -> Optional[str]:
        """
        The Consul datacenter to query.
        """
        return pulumi.get(self, "datacenter")

    @datacenter.setter
    def datacenter(self, value: Optional[str]):
        pulumi.set(self, "datacenter", value)

    @property
    @pulumi.getter
    def namespace(self) -> Optional[str]:
        """
        The namespace to lookup the services.
        """
        return pulumi.get(self, "namespace")

    @namespace.setter
    def namespace(self, value: Optional[str]):
        pulumi.set(self, "namespace", value)

    @property
    @pulumi.getter
    def near(self) -> Optional[str]:
        # NOTE(review): passed straight through to the query; presumably a node
        # name used by Consul to sort results by proximity — confirm against
        # the Consul catalog API before documenting as fact.
        return pulumi.get(self, "near")

    @near.setter
    def near(self, value: Optional[str]):
        pulumi.set(self, "near", value)

    @property
    @pulumi.getter(name="nodeMeta")
    def node_meta(self) -> Optional[Mapping[str, str]]:
        # NOTE(review): key/value pairs forwarded to the query; semantics are
        # not documented anywhere in this file — verify against the Consul API.
        return pulumi.get(self, "node_meta")

    @node_meta.setter
    def node_meta(self, value: Optional[Mapping[str, str]]):
        pulumi.set(self, "node_meta", value)

    @property
    @pulumi.getter(name="requireConsistent")
    def require_consistent(self) -> Optional[bool]:
        """
        When `true` force the client to perform a read on at least quorum
        servers and verify the result is the same. Defaults to `false`.
        """
        return pulumi.get(self, "require_consistent")

    @require_consistent.setter
    def require_consistent(self, value: Optional[bool]):
        pulumi.set(self, "require_consistent", value)

    @property
    @pulumi.getter
    def token(self) -> Optional[str]:
        """
        Specify the Consul ACL token to use when performing the request.
        This defaults to the same API token configured by the `consul`
        provider but may be overridden if necessary.
        """
        return pulumi.get(self, "token")

    @token.setter
    def token(self, value: Optional[str]):
        pulumi.set(self, "token", value)

    @property
    @pulumi.getter(name="waitIndex")
    def wait_index(self) -> Optional[int]:
        """
        Index number used to enable blocking queries.
        """
        return pulumi.get(self, "wait_index")

    @wait_index.setter
    def wait_index(self, value: Optional[int]):
        pulumi.set(self, "wait_index", value)

    @property
    @pulumi.getter(name="waitTime")
    def wait_time(self) -> Optional[str]:
        """
        Max time the client should wait for a blocking query to return.
        """
        return pulumi.get(self, "wait_time")

    @wait_time.setter
    def wait_time(self, value: Optional[str]):
        pulumi.set(self, "wait_time", value)
@pulumi.input_type
class GetKeyPrefixSubkeyCollectionArgs:
    """
    Describes a single subkey read through the key-prefix data source:
    maps a variable name to a subkey path under the configured `path_prefix`,
    with an optional default used when the key is absent in Consul.
    """
    def __init__(__self__, *,
                 name: str,
                 path: str,
                 default: Optional[str] = None):
        """
        :param str name: This is the name of the key. This value of the
               key is exposed as `var.<name>`. This is not the path of the subkey
               in Consul.
        :param str path: This is the subkey path in Consul (which will be appended
               to the given `path_prefix`) to construct the full key that will be used
               to read the value.
        :param str default: This is the default value to set for `var.<name>`
               if the key does not exist in Consul. Defaults to an empty string.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "path", path)
        # `default` is the only optional field; omit it entirely when unset.
        if default is not None:
            pulumi.set(__self__, "default", default)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        This is the name of the key. This value of the
        key is exposed as `var.<name>`. This is not the path of the subkey
        in Consul.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def path(self) -> str:
        """
        This is the subkey path in Consul (which will be appended
        to the given `path_prefix`) to construct the full key that will be used
        to read the value.
        """
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: str):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def default(self) -> Optional[str]:
        """
        This is the default value to set for `var.<name>`
        if the key does not exist in Consul. Defaults to an empty string.
        """
        return pulumi.get(self, "default")

    @default.setter
    def default(self, value: Optional[str]):
        pulumi.set(self, "default", value)
@pulumi.input_type
class GetKeysKeyArgs:
    """
    Describes a single key read through the keys data source: maps a
    variable name to a full key path in Consul, with an optional default
    used when the key is absent.
    """
    def __init__(__self__, *,
                 name: str,
                 path: str,
                 default: Optional[str] = None):
        """
        :param str name: This is the name of the key. This value of the
               key is exposed as `var.<name>`. This is not the path of the key
               in Consul.
        :param str path: This is the path in Consul that should be read
               or written to.
        :param str default: This is the default value to set for `var.<name>`
               if the key does not exist in Consul. Defaults to an empty string.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "path", path)
        # `default` is the only optional field; omit it entirely when unset.
        if default is not None:
            pulumi.set(__self__, "default", default)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        This is the name of the key. This value of the
        key is exposed as `var.<name>`. This is not the path of the key
        in Consul.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def path(self) -> str:
        """
        This is the path in Consul that should be read
        or written to.
        """
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: str):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def default(self) -> Optional[str]:
        """
        This is the default value to set for `var.<name>`
        if the key does not exist in Consul. Defaults to an empty string.
        """
        return pulumi.get(self, "default")

    @default.setter
    def default(self, value: Optional[str]):
        pulumi.set(self, "default", value)
@pulumi.input_type
class GetNodesQueryOptionArgs:
    """
    Query options applied when listing nodes from Consul. Every field is
    optional; fields left unset are simply not forwarded (see `__init__`).
    Unlike the service variants, this class has no `namespace` field.
    """
    def __init__(__self__, *,
                 allow_stale: Optional[bool] = None,
                 datacenter: Optional[str] = None,
                 near: Optional[str] = None,
                 node_meta: Optional[Mapping[str, str]] = None,
                 require_consistent: Optional[bool] = None,
                 token: Optional[str] = None,
                 wait_index: Optional[int] = None,
                 wait_time: Optional[str] = None):
        """
        :param bool allow_stale: When `true`, the default, allow responses from
               Consul servers that are followers.
        :param str datacenter: The Consul datacenter to query. Defaults to the
               same value found in `query_options` parameter specified below, or if that is
               empty, the `datacenter` value found in the Consul agent that this provider is
               configured to talk to then the datacenter in the provider setup.
        :param bool require_consistent: When `true` force the client to perform a
               read on at least quorum servers and verify the result is the same. Defaults
               to `false`.
        :param str token: Specify the Consul ACL token to use when performing the
               request. This defaults to the same API token configured by the `consul`
               provider but may be overridden if necessary.
        :param int wait_index: Index number used to enable blocking queries.
        :param str wait_time: Max time the client should wait for a blocking query
               to return.
        """
        # Only forward values the caller actually supplied so unset options
        # are omitted from the underlying query entirely.
        if allow_stale is not None:
            pulumi.set(__self__, "allow_stale", allow_stale)
        if datacenter is not None:
            pulumi.set(__self__, "datacenter", datacenter)
        if near is not None:
            pulumi.set(__self__, "near", near)
        if node_meta is not None:
            pulumi.set(__self__, "node_meta", node_meta)
        if require_consistent is not None:
            pulumi.set(__self__, "require_consistent", require_consistent)
        if token is not None:
            pulumi.set(__self__, "token", token)
        if wait_index is not None:
            pulumi.set(__self__, "wait_index", wait_index)
        if wait_time is not None:
            pulumi.set(__self__, "wait_time", wait_time)

    @property
    @pulumi.getter(name="allowStale")
    def allow_stale(self) -> Optional[bool]:
        """
        When `true`, the default, allow responses from
        Consul servers that are followers.
        """
        return pulumi.get(self, "allow_stale")

    @allow_stale.setter
    def allow_stale(self, value: Optional[bool]):
        pulumi.set(self, "allow_stale", value)

    @property
    @pulumi.getter
    def datacenter(self) -> Optional[str]:
        """
        The Consul datacenter to query. Defaults to the
        same value found in `query_options` parameter specified below, or if that is
        empty, the `datacenter` value found in the Consul agent that this provider is
        configured to talk to then the datacenter in the provider setup.
        """
        return pulumi.get(self, "datacenter")

    @datacenter.setter
    def datacenter(self, value: Optional[str]):
        pulumi.set(self, "datacenter", value)

    @property
    @pulumi.getter
    def near(self) -> Optional[str]:
        # NOTE(review): passed straight through to the query; presumably a node
        # name used by Consul to sort results by proximity — confirm against
        # the Consul catalog API before documenting as fact.
        return pulumi.get(self, "near")

    @near.setter
    def near(self, value: Optional[str]):
        pulumi.set(self, "near", value)

    @property
    @pulumi.getter(name="nodeMeta")
    def node_meta(self) -> Optional[Mapping[str, str]]:
        # NOTE(review): key/value pairs forwarded to the query; semantics are
        # not documented anywhere in this file — verify against the Consul API.
        return pulumi.get(self, "node_meta")

    @node_meta.setter
    def node_meta(self, value: Optional[Mapping[str, str]]):
        pulumi.set(self, "node_meta", value)

    @property
    @pulumi.getter(name="requireConsistent")
    def require_consistent(self) -> Optional[bool]:
        """
        When `true` force the client to perform a
        read on at least quorum servers and verify the result is the same. Defaults
        to `false`.
        """
        return pulumi.get(self, "require_consistent")

    @require_consistent.setter
    def require_consistent(self, value: Optional[bool]):
        pulumi.set(self, "require_consistent", value)

    @property
    @pulumi.getter
    def token(self) -> Optional[str]:
        """
        Specify the Consul ACL token to use when performing the
        request. This defaults to the same API token configured by the `consul`
        provider but may be overridden if necessary.
        """
        return pulumi.get(self, "token")

    @token.setter
    def token(self, value: Optional[str]):
        pulumi.set(self, "token", value)

    @property
    @pulumi.getter(name="waitIndex")
    def wait_index(self) -> Optional[int]:
        """
        Index number used to enable blocking queries.
        """
        return pulumi.get(self, "wait_index")

    @wait_index.setter
    def wait_index(self, value: Optional[int]):
        pulumi.set(self, "wait_index", value)

    @property
    @pulumi.getter(name="waitTime")
    def wait_time(self) -> Optional[str]:
        """
        Max time the client should wait for a blocking query
        to return.
        """
        return pulumi.get(self, "wait_time")

    @wait_time.setter
    def wait_time(self, value: Optional[str]):
        pulumi.set(self, "wait_time", value)
@pulumi.input_type
class GetServiceQueryOptionArgs:
    """
    Query options applied when looking up a single service from Consul.
    Every field is optional; fields left unset are simply not forwarded
    (see `__init__`).
    """
    def __init__(__self__, *,
                 allow_stale: Optional[bool] = None,
                 datacenter: Optional[str] = None,
                 namespace: Optional[str] = None,
                 near: Optional[str] = None,
                 node_meta: Optional[Mapping[str, str]] = None,
                 require_consistent: Optional[bool] = None,
                 token: Optional[str] = None,
                 wait_index: Optional[int] = None,
                 wait_time: Optional[str] = None):
        """
        :param bool allow_stale: When `true`, the default, allow responses from
               Consul servers that are followers.
        :param str datacenter: The Consul datacenter to query. Defaults to the
               same value found in `query_options` parameter specified below, or if that is
               empty, the `datacenter` value found in the Consul agent that this provider is
               configured to talk to.
        :param str namespace: The namespace to lookup the service.
        :param bool require_consistent: When `true` force the client to perform a
               read on at least quorum servers and verify the result is the same. Defaults
               to `false`.
        :param str token: Specify the Consul ACL token to use when performing the
               request. This defaults to the same API token configured by the `consul`
               provider but may be overridden if necessary.
        :param int wait_index: Index number used to enable blocking queries.
        :param str wait_time: Max time the client should wait for a blocking query
               to return.
        """
        # Only forward values the caller actually supplied so unset options
        # are omitted from the underlying query entirely.
        if allow_stale is not None:
            pulumi.set(__self__, "allow_stale", allow_stale)
        if datacenter is not None:
            pulumi.set(__self__, "datacenter", datacenter)
        if namespace is not None:
            pulumi.set(__self__, "namespace", namespace)
        if near is not None:
            pulumi.set(__self__, "near", near)
        if node_meta is not None:
            pulumi.set(__self__, "node_meta", node_meta)
        if require_consistent is not None:
            pulumi.set(__self__, "require_consistent", require_consistent)
        if token is not None:
            pulumi.set(__self__, "token", token)
        if wait_index is not None:
            pulumi.set(__self__, "wait_index", wait_index)
        if wait_time is not None:
            pulumi.set(__self__, "wait_time", wait_time)

    @property
    @pulumi.getter(name="allowStale")
    def allow_stale(self) -> Optional[bool]:
        """
        When `true`, the default, allow responses from
        Consul servers that are followers.
        """
        return pulumi.get(self, "allow_stale")

    @allow_stale.setter
    def allow_stale(self, value: Optional[bool]):
        pulumi.set(self, "allow_stale", value)

    @property
    @pulumi.getter
    def datacenter(self) -> Optional[str]:
        """
        The Consul datacenter to query. Defaults to the
        same value found in `query_options` parameter specified below, or if that is
        empty, the `datacenter` value found in the Consul agent that this provider is
        configured to talk to.
        """
        return pulumi.get(self, "datacenter")

    @datacenter.setter
    def datacenter(self, value: Optional[str]):
        pulumi.set(self, "datacenter", value)

    @property
    @pulumi.getter
    def namespace(self) -> Optional[str]:
        """
        The namespace to lookup the service.
        """
        return pulumi.get(self, "namespace")

    @namespace.setter
    def namespace(self, value: Optional[str]):
        pulumi.set(self, "namespace", value)

    @property
    @pulumi.getter
    def near(self) -> Optional[str]:
        # NOTE(review): passed straight through to the query; presumably a node
        # name used by Consul to sort results by proximity — confirm against
        # the Consul catalog API before documenting as fact.
        return pulumi.get(self, "near")

    @near.setter
    def near(self, value: Optional[str]):
        pulumi.set(self, "near", value)

    @property
    @pulumi.getter(name="nodeMeta")
    def node_meta(self) -> Optional[Mapping[str, str]]:
        # NOTE(review): key/value pairs forwarded to the query; semantics are
        # not documented anywhere in this file — verify against the Consul API.
        return pulumi.get(self, "node_meta")

    @node_meta.setter
    def node_meta(self, value: Optional[Mapping[str, str]]):
        pulumi.set(self, "node_meta", value)

    @property
    @pulumi.getter(name="requireConsistent")
    def require_consistent(self) -> Optional[bool]:
        """
        When `true` force the client to perform a
        read on at least quorum servers and verify the result is the same. Defaults
        to `false`.
        """
        return pulumi.get(self, "require_consistent")

    @require_consistent.setter
    def require_consistent(self, value: Optional[bool]):
        pulumi.set(self, "require_consistent", value)

    @property
    @pulumi.getter
    def token(self) -> Optional[str]:
        """
        Specify the Consul ACL token to use when performing the
        request. This defaults to the same API token configured by the `consul`
        provider but may be overridden if necessary.
        """
        return pulumi.get(self, "token")

    @token.setter
    def token(self, value: Optional[str]):
        pulumi.set(self, "token", value)

    @property
    @pulumi.getter(name="waitIndex")
    def wait_index(self) -> Optional[int]:
        """
        Index number used to enable blocking queries.
        """
        return pulumi.get(self, "wait_index")

    @wait_index.setter
    def wait_index(self, value: Optional[int]):
        pulumi.set(self, "wait_index", value)

    @property
    @pulumi.getter(name="waitTime")
    def wait_time(self) -> Optional[str]:
        """
        Max time the client should wait for a blocking query
        to return.
        """
        return pulumi.get(self, "wait_time")

    @wait_time.setter
    def wait_time(self, value: Optional[str]):
        pulumi.set(self, "wait_time", value)
@pulumi.input_type
class GetServicesQueryOptionArgs:
    """
    Query options applied when listing services from Consul. Every field is
    optional; fields left unset are simply not forwarded (see `__init__`).
    """
    def __init__(__self__, *,
                 allow_stale: Optional[bool] = None,
                 datacenter: Optional[str] = None,
                 namespace: Optional[str] = None,
                 near: Optional[str] = None,
                 node_meta: Optional[Mapping[str, str]] = None,
                 require_consistent: Optional[bool] = None,
                 token: Optional[str] = None,
                 wait_index: Optional[int] = None,
                 wait_time: Optional[str] = None):
        """
        :param bool allow_stale: When `true`, the default, allow responses from
               Consul servers that are followers.
        :param str datacenter: The Consul datacenter to query. Defaults to the
               same value found in `query_options` parameter specified below, or if that is
               empty, the `datacenter` value found in the Consul agent that this provider is
               configured to talk to.
        :param str namespace: The namespace to lookup the services.
        :param bool require_consistent: When `true` force the client to perform a
               read on at least quorum servers and verify the result is the same. Defaults
               to `false`.
        :param str token: Specify the Consul ACL token to use when performing the
               request. This defaults to the same API token configured by the `consul`
               provider but may be overridden if necessary.
        :param int wait_index: Index number used to enable blocking queries.
        :param str wait_time: Max time the client should wait for a blocking query
               to return.
        """
        # Only forward values the caller actually supplied so unset options
        # are omitted from the underlying query entirely.
        if allow_stale is not None:
            pulumi.set(__self__, "allow_stale", allow_stale)
        if datacenter is not None:
            pulumi.set(__self__, "datacenter", datacenter)
        if namespace is not None:
            pulumi.set(__self__, "namespace", namespace)
        if near is not None:
            pulumi.set(__self__, "near", near)
        if node_meta is not None:
            pulumi.set(__self__, "node_meta", node_meta)
        if require_consistent is not None:
            pulumi.set(__self__, "require_consistent", require_consistent)
        if token is not None:
            pulumi.set(__self__, "token", token)
        if wait_index is not None:
            pulumi.set(__self__, "wait_index", wait_index)
        if wait_time is not None:
            pulumi.set(__self__, "wait_time", wait_time)

    @property
    @pulumi.getter(name="allowStale")
    def allow_stale(self) -> Optional[bool]:
        """
        When `true`, the default, allow responses from
        Consul servers that are followers.
        """
        return pulumi.get(self, "allow_stale")

    @allow_stale.setter
    def allow_stale(self, value: Optional[bool]):
        pulumi.set(self, "allow_stale", value)

    @property
    @pulumi.getter
    def datacenter(self) -> Optional[str]:
        """
        The Consul datacenter to query. Defaults to the
        same value found in `query_options` parameter specified below, or if that is
        empty, the `datacenter` value found in the Consul agent that this provider is
        configured to talk to.
        """
        return pulumi.get(self, "datacenter")

    @datacenter.setter
    def datacenter(self, value: Optional[str]):
        pulumi.set(self, "datacenter", value)

    @property
    @pulumi.getter
    def namespace(self) -> Optional[str]:
        """
        The namespace to lookup the services.
        """
        return pulumi.get(self, "namespace")

    @namespace.setter
    def namespace(self, value: Optional[str]):
        pulumi.set(self, "namespace", value)

    @property
    @pulumi.getter
    def near(self) -> Optional[str]:
        # NOTE(review): passed straight through to the query; presumably a node
        # name used by Consul to sort results by proximity — confirm against
        # the Consul catalog API before documenting as fact.
        return pulumi.get(self, "near")

    @near.setter
    def near(self, value: Optional[str]):
        pulumi.set(self, "near", value)

    @property
    @pulumi.getter(name="nodeMeta")
    def node_meta(self) -> Optional[Mapping[str, str]]:
        # NOTE(review): key/value pairs forwarded to the query; semantics are
        # not documented anywhere in this file — verify against the Consul API.
        return pulumi.get(self, "node_meta")

    @node_meta.setter
    def node_meta(self, value: Optional[Mapping[str, str]]):
        pulumi.set(self, "node_meta", value)

    @property
    @pulumi.getter(name="requireConsistent")
    def require_consistent(self) -> Optional[bool]:
        """
        When `true` force the client to perform a
        read on at least quorum servers and verify the result is the same. Defaults
        to `false`.
        """
        return pulumi.get(self, "require_consistent")

    @require_consistent.setter
    def require_consistent(self, value: Optional[bool]):
        pulumi.set(self, "require_consistent", value)

    @property
    @pulumi.getter
    def token(self) -> Optional[str]:
        """
        Specify the Consul ACL token to use when performing the
        request. This defaults to the same API token configured by the `consul`
        provider but may be overridden if necessary.
        """
        return pulumi.get(self, "token")

    @token.setter
    def token(self, value: Optional[str]):
        pulumi.set(self, "token", value)

    @property
    @pulumi.getter(name="waitIndex")
    def wait_index(self) -> Optional[int]:
        """
        Index number used to enable blocking queries.
        """
        return pulumi.get(self, "wait_index")

    @wait_index.setter
    def wait_index(self, value: Optional[int]):
        pulumi.set(self, "wait_index", value)

    @property
    @pulumi.getter(name="waitTime")
    def wait_time(self) -> Optional[str]:
        """
        Max time the client should wait for a blocking query
        to return.
        """
        return pulumi.get(self, "wait_time")

    @wait_time.setter
    def wait_time(self, value: Optional[str]):
        pulumi.set(self, "wait_time", value)
| 33.897436
| 149
| 0.611212
| 7,141
| 59,490
| 4.949727
| 0.044672
| 0.052453
| 0.075765
| 0.055367
| 0.894528
| 0.848752
| 0.815453
| 0.787784
| 0.767951
| 0.722826
| 0
| 0.000303
| 0.277626
| 59,490
| 1,754
| 150
| 33.916762
| 0.822195
| 0.224038
| 0
| 0.76721
| 1
| 0
| 0.085579
| 0.01453
| 0
| 0
| 0
| 0
| 0
| 1
| 0.206522
| false
| 0
| 0.004529
| 0.037138
| 0.324275
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
39766684fb0c01a1cd78b752933f53ca22adbf4a
| 22,531
|
py
|
Python
|
scenegraph/exp-official/taskographyv2medium1_FF-X/taskographyv2medium1_FF-X_test.py
|
taskography/3dscenegraph-dev
|
2c261241230fbea1f1c687ff793478248f25c02c
|
[
"MIT"
] | 1
|
2022-01-30T22:06:57.000Z
|
2022-01-30T22:06:57.000Z
|
scenegraph/exp-official/taskographyv2medium1_FF-X/taskographyv2medium1_FF-X_test.py
|
taskography/3dscenegraph-dev
|
2c261241230fbea1f1c687ff793478248f25c02c
|
[
"MIT"
] | null | null | null |
scenegraph/exp-official/taskographyv2medium1_FF-X/taskographyv2medium1_FF-X_test.py
|
taskography/3dscenegraph-dev
|
2c261241230fbea1f1c687ff793478248f25c02c
|
[
"MIT"
] | null | null | null |
STATS = [
{
"num_node_expansions": 25,
"plan_length": 20,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 22,
"plan_length": 18,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 24,
"plan_length": 21,
"search_time": 0.33,
"total_time": 0.33
},
{
"num_node_expansions": 26,
"plan_length": 20,
"search_time": 0.41,
"total_time": 0.41
},
{
"num_node_expansions": 19,
"plan_length": 14,
"search_time": 0.44,
"total_time": 0.44
},
{
"num_node_expansions": 28,
"plan_length": 24,
"search_time": 0.76,
"total_time": 0.76
},
{
"num_node_expansions": 22,
"plan_length": 18,
"search_time": 0.15,
"total_time": 0.15
},
{
"num_node_expansions": 19,
"plan_length": 17,
"search_time": 0.15,
"total_time": 0.15
},
{
"num_node_expansions": 19,
"plan_length": 17,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 16,
"plan_length": 12,
"search_time": 0.02,
"total_time": 0.02
},
{
"num_node_expansions": 13,
"plan_length": 11,
"search_time": 0.1,
"total_time": 0.1
},
{
"num_node_expansions": 17,
"plan_length": 15,
"search_time": 0.1,
"total_time": 0.1
},
{
"num_node_expansions": 18,
"plan_length": 15,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 12,
"plan_length": 10,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 18,
"plan_length": 16,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 15,
"plan_length": 13,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 27,
"plan_length": 23,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 31,
"plan_length": 27,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 28,
"plan_length": 24,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 18,
"plan_length": 16,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 17,
"plan_length": 13,
"search_time": 0.15,
"total_time": 0.15
},
{
"num_node_expansions": 20,
"plan_length": 16,
"search_time": 0.19,
"total_time": 0.19
},
{
"num_node_expansions": 16,
"plan_length": 13,
"search_time": 0.1,
"total_time": 0.1
},
{
"num_node_expansions": 28,
"plan_length": 24,
"search_time": 0.21,
"total_time": 0.21
},
{
"num_node_expansions": 13,
"plan_length": 11,
"search_time": 0.01,
"total_time": 0.01
},
{
"num_node_expansions": 19,
"plan_length": 16,
"search_time": 0.02,
"total_time": 0.02
},
{
"num_node_expansions": 17,
"plan_length": 13,
"search_time": 0.22,
"total_time": 0.22
},
{
"num_node_expansions": 16,
"plan_length": 14,
"search_time": 0.23,
"total_time": 0.23
},
{
"num_node_expansions": 20,
"plan_length": 15,
"search_time": 0.13,
"total_time": 0.13
},
{
"num_node_expansions": 16,
"plan_length": 13,
"search_time": 0.12,
"total_time": 0.12
},
{
"num_node_expansions": 27,
"plan_length": 22,
"search_time": 0.2,
"total_time": 0.2
},
{
"num_node_expansions": 21,
"plan_length": 16,
"search_time": 0.13,
"total_time": 0.13
},
{
"num_node_expansions": 19,
"plan_length": 16,
"search_time": 0.15,
"total_time": 0.15
},
{
"num_node_expansions": 22,
"plan_length": 20,
"search_time": 0.18,
"total_time": 0.18
},
{
"num_node_expansions": 25,
"plan_length": 19,
"search_time": 0.09,
"total_time": 0.09
},
{
"num_node_expansions": 21,
"plan_length": 19,
"search_time": 0.1,
"total_time": 0.1
},
{
"num_node_expansions": 17,
"plan_length": 14,
"search_time": 0.34,
"total_time": 0.34
},
{
"num_node_expansions": 38,
"plan_length": 33,
"search_time": 0.62,
"total_time": 0.62
},
{
"num_node_expansions": 17,
"plan_length": 14,
"search_time": 0.1,
"total_time": 0.1
},
{
"num_node_expansions": 23,
"plan_length": 20,
"search_time": 0.15,
"total_time": 0.15
},
{
"num_node_expansions": 21,
"plan_length": 17,
"search_time": 0.17,
"total_time": 0.17
},
{
"num_node_expansions": 9,
"plan_length": 7,
"search_time": 0.09,
"total_time": 0.09
},
{
"num_node_expansions": 18,
"plan_length": 13,
"search_time": 0.17,
"total_time": 0.17
},
{
"num_node_expansions": 29,
"plan_length": 27,
"search_time": 0.3,
"total_time": 0.3
},
{
"num_node_expansions": 15,
"plan_length": 13,
"search_time": 0.42,
"total_time": 0.42
},
{
"num_node_expansions": 35,
"plan_length": 26,
"search_time": 0.91,
"total_time": 0.91
},
{
"num_node_expansions": 18,
"plan_length": 16,
"search_time": 0.14,
"total_time": 0.14
},
{
"num_node_expansions": 22,
"plan_length": 19,
"search_time": 0.2,
"total_time": 0.2
},
{
"num_node_expansions": 24,
"plan_length": 19,
"search_time": 0.39,
"total_time": 0.39
},
{
"num_node_expansions": 17,
"plan_length": 15,
"search_time": 0.28,
"total_time": 0.28
},
{
"num_node_expansions": 10,
"plan_length": 8,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 17,
"plan_length": 14,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 28,
"plan_length": 25,
"search_time": 1.05,
"total_time": 1.05
},
{
"num_node_expansions": 19,
"plan_length": 14,
"search_time": 0.74,
"total_time": 0.74
},
{
"num_node_expansions": 11,
"plan_length": 9,
"search_time": 0.61,
"total_time": 0.61
},
{
"num_node_expansions": 39,
"plan_length": 34,
"search_time": 0.97,
"total_time": 0.97
},
{
"num_node_expansions": 15,
"plan_length": 13,
"search_time": 0.01,
"total_time": 0.01
},
{
"num_node_expansions": 25,
"plan_length": 19,
"search_time": 0.02,
"total_time": 0.02
},
{
"num_node_expansions": 20,
"plan_length": 18,
"search_time": 0.19,
"total_time": 0.19
},
{
"num_node_expansions": 27,
"plan_length": 21,
"search_time": 0.25,
"total_time": 0.25
},
{
"num_node_expansions": 14,
"plan_length": 11,
"search_time": 0.05,
"total_time": 0.05
},
{
"num_node_expansions": 31,
"plan_length": 24,
"search_time": 0.13,
"total_time": 0.13
},
{
"num_node_expansions": 9,
"plan_length": 7,
"search_time": 0.37,
"total_time": 0.37
},
{
"num_node_expansions": 33,
"plan_length": 31,
"search_time": 0.87,
"total_time": 0.87
},
{
"num_node_expansions": 18,
"plan_length": 15,
"search_time": 0.26,
"total_time": 0.26
},
{
"num_node_expansions": 18,
"plan_length": 15,
"search_time": 0.18,
"total_time": 0.18
},
{
"num_node_expansions": 18,
"plan_length": 16,
"search_time": 0.06,
"total_time": 0.06
},
{
"num_node_expansions": 18,
"plan_length": 13,
"search_time": 0.06,
"total_time": 0.06
},
{
"num_node_expansions": 11,
"plan_length": 9,
"search_time": 0.1,
"total_time": 0.1
},
{
"num_node_expansions": 10,
"plan_length": 8,
"search_time": 0.1,
"total_time": 0.1
},
{
"num_node_expansions": 18,
"plan_length": 14,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 28,
"plan_length": 21,
"search_time": 0.06,
"total_time": 0.06
},
{
"num_node_expansions": 22,
"plan_length": 17,
"search_time": 0.12,
"total_time": 0.12
},
{
"num_node_expansions": 12,
"plan_length": 10,
"search_time": 0.09,
"total_time": 0.09
},
{
"num_node_expansions": 15,
"plan_length": 13,
"search_time": 0.0,
"total_time": 0.0
},
{
"num_node_expansions": 13,
"plan_length": 10,
"search_time": 0.0,
"total_time": 0.0
},
{
"num_node_expansions": 22,
"plan_length": 20,
"search_time": 0.5,
"total_time": 0.5
},
{
"num_node_expansions": 27,
"plan_length": 25,
"search_time": 0.57,
"total_time": 0.57
},
{
"num_node_expansions": 19,
"plan_length": 14,
"search_time": 0.12,
"total_time": 0.12
},
{
"num_node_expansions": 13,
"plan_length": 10,
"search_time": 0.11,
"total_time": 0.11
},
{
"num_node_expansions": 13,
"plan_length": 11,
"search_time": 0.0,
"total_time": 0.0
},
{
"num_node_expansions": 20,
"plan_length": 18,
"search_time": 0.01,
"total_time": 0.01
},
{
"num_node_expansions": 20,
"plan_length": 17,
"search_time": 0.21,
"total_time": 0.21
},
{
"num_node_expansions": 28,
"plan_length": 21,
"search_time": 0.23,
"total_time": 0.23
},
{
"num_node_expansions": 13,
"plan_length": 10,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 17,
"plan_length": 15,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 23,
"plan_length": 18,
"search_time": 0.08,
"total_time": 0.08
},
{
"num_node_expansions": 19,
"plan_length": 17,
"search_time": 0.08,
"total_time": 0.08
},
{
"num_node_expansions": 22,
"plan_length": 17,
"search_time": 0.09,
"total_time": 0.09
},
{
"num_node_expansions": 22,
"plan_length": 18,
"search_time": 0.09,
"total_time": 0.09
},
{
"num_node_expansions": 16,
"plan_length": 12,
"search_time": 0.02,
"total_time": 0.02
},
{
"num_node_expansions": 14,
"plan_length": 10,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 19,
"plan_length": 14,
"search_time": 0.01,
"total_time": 0.01
},
{
"num_node_expansions": 17,
"plan_length": 14,
"search_time": 0.01,
"total_time": 0.01
},
{
"num_node_expansions": 22,
"plan_length": 20,
"search_time": 0.17,
"total_time": 0.17
},
{
"num_node_expansions": 19,
"plan_length": 17,
"search_time": 0.15,
"total_time": 0.15
},
{
"num_node_expansions": 15,
"plan_length": 12,
"search_time": 0.09,
"total_time": 0.09
},
{
"num_node_expansions": 21,
"plan_length": 18,
"search_time": 0.1,
"total_time": 0.1
},
{
"num_node_expansions": 28,
"plan_length": 25,
"search_time": 1.75,
"total_time": 1.75
},
{
"num_node_expansions": 25,
"plan_length": 20,
"search_time": 1.54,
"total_time": 1.54
},
{
"num_node_expansions": 9,
"plan_length": 7,
"search_time": 0.0,
"total_time": 0.0
},
{
"num_node_expansions": 9,
"plan_length": 7,
"search_time": 0.01,
"total_time": 0.01
},
{
"num_node_expansions": 14,
"plan_length": 11,
"search_time": 0.02,
"total_time": 0.02
},
{
"num_node_expansions": 19,
"plan_length": 15,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 17,
"plan_length": 15,
"search_time": 0.1,
"total_time": 0.1
},
{
"num_node_expansions": 26,
"plan_length": 20,
"search_time": 0.13,
"total_time": 0.13
},
{
"num_node_expansions": 13,
"plan_length": 10,
"search_time": 0.1,
"total_time": 0.1
},
{
"num_node_expansions": 15,
"plan_length": 12,
"search_time": 0.11,
"total_time": 0.11
},
{
"num_node_expansions": 35,
"plan_length": 33,
"search_time": 0.69,
"total_time": 0.69
},
{
"num_node_expansions": 33,
"plan_length": 28,
"search_time": 0.64,
"total_time": 0.64
},
{
"num_node_expansions": 20,
"plan_length": 17,
"search_time": 0.07,
"total_time": 0.07
},
{
"num_node_expansions": 13,
"plan_length": 10,
"search_time": 0.05,
"total_time": 0.05
},
{
"num_node_expansions": 16,
"plan_length": 12,
"search_time": 1.02,
"total_time": 1.02
},
{
"num_node_expansions": 24,
"plan_length": 19,
"search_time": 1.73,
"total_time": 1.73
},
{
"num_node_expansions": 17,
"plan_length": 14,
"search_time": 0.08,
"total_time": 0.08
},
{
"num_node_expansions": 24,
"plan_length": 18,
"search_time": 0.1,
"total_time": 0.1
},
{
"num_node_expansions": 32,
"plan_length": 25,
"search_time": 0.4,
"total_time": 0.4
},
{
"num_node_expansions": 25,
"plan_length": 19,
"search_time": 0.29,
"total_time": 0.29
},
{
"num_node_expansions": 21,
"plan_length": 17,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 22,
"plan_length": 17,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 21,
"plan_length": 17,
"search_time": 0.0,
"total_time": 0.0
},
{
"num_node_expansions": 22,
"plan_length": 20,
"search_time": 0.0,
"total_time": 0.0
},
{
"num_node_expansions": 14,
"plan_length": 12,
"search_time": 0.13,
"total_time": 0.13
},
{
"num_node_expansions": 31,
"plan_length": 28,
"search_time": 0.28,
"total_time": 0.28
},
{
"num_node_expansions": 17,
"plan_length": 14,
"search_time": 0.09,
"total_time": 0.09
},
{
"num_node_expansions": 26,
"plan_length": 20,
"search_time": 0.13,
"total_time": 0.13
},
{
"num_node_expansions": 18,
"plan_length": 14,
"search_time": 0.23,
"total_time": 0.23
},
{
"num_node_expansions": 16,
"plan_length": 12,
"search_time": 0.18,
"total_time": 0.18
},
{
"num_node_expansions": 16,
"plan_length": 12,
"search_time": 0.02,
"total_time": 0.02
},
{
"num_node_expansions": 25,
"plan_length": 19,
"search_time": 0.02,
"total_time": 0.02
},
{
"num_node_expansions": 17,
"plan_length": 12,
"search_time": 0.0,
"total_time": 0.0
},
{
"num_node_expansions": 16,
"plan_length": 14,
"search_time": 0.0,
"total_time": 0.0
},
{
"num_node_expansions": 19,
"plan_length": 15,
"search_time": 0.11,
"total_time": 0.11
},
{
"num_node_expansions": 18,
"plan_length": 13,
"search_time": 0.1,
"total_time": 0.1
},
{
"num_node_expansions": 21,
"plan_length": 19,
"search_time": 0.01,
"total_time": 0.01
},
{
"num_node_expansions": 23,
"plan_length": 17,
"search_time": 0.02,
"total_time": 0.02
},
{
"num_node_expansions": 19,
"plan_length": 13,
"search_time": 0.07,
"total_time": 0.07
},
{
"num_node_expansions": 15,
"plan_length": 12,
"search_time": 0.06,
"total_time": 0.06
},
{
"num_node_expansions": 23,
"plan_length": 19,
"search_time": 0.09,
"total_time": 0.09
},
{
"num_node_expansions": 20,
"plan_length": 18,
"search_time": 0.08,
"total_time": 0.08
},
{
"num_node_expansions": 21,
"plan_length": 16,
"search_time": 0.26,
"total_time": 0.26
},
{
"num_node_expansions": 20,
"plan_length": 15,
"search_time": 0.29,
"total_time": 0.29
},
{
"num_node_expansions": 18,
"plan_length": 16,
"search_time": 1.07,
"total_time": 1.07
},
{
"num_node_expansions": 25,
"plan_length": 23,
"search_time": 1.64,
"total_time": 1.64
},
{
"num_node_expansions": 14,
"plan_length": 10,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 17,
"plan_length": 13,
"search_time": 0.05,
"total_time": 0.05
},
{
"num_node_expansions": 24,
"plan_length": 19,
"search_time": 0.05,
"total_time": 0.05
},
{
"num_node_expansions": 22,
"plan_length": 18,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 15,
"plan_length": 12,
"search_time": 0.02,
"total_time": 0.02
},
{
"num_node_expansions": 11,
"plan_length": 8,
"search_time": 0.01,
"total_time": 0.01
},
{
"num_node_expansions": 20,
"plan_length": 18,
"search_time": 0.01,
"total_time": 0.01
},
{
"num_node_expansions": 27,
"plan_length": 23,
"search_time": 0.01,
"total_time": 0.01
},
{
"num_node_expansions": 20,
"plan_length": 17,
"search_time": 0.35,
"total_time": 0.35
},
{
"num_node_expansions": 17,
"plan_length": 14,
"search_time": 0.28,
"total_time": 0.28
},
{
"num_node_expansions": 15,
"plan_length": 12,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 17,
"plan_length": 12,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 19,
"plan_length": 14,
"search_time": 0.22,
"total_time": 0.22
},
{
"num_node_expansions": 39,
"plan_length": 33,
"search_time": 0.43,
"total_time": 0.43
},
{
"num_node_expansions": 16,
"plan_length": 13,
"search_time": 0.14,
"total_time": 0.14
},
{
"num_node_expansions": 25,
"plan_length": 20,
"search_time": 0.19,
"total_time": 0.19
},
{
"num_node_expansions": 21,
"plan_length": 16,
"search_time": 0.02,
"total_time": 0.02
},
{
"num_node_expansions": 27,
"plan_length": 19,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 18,
"plan_length": 14,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 16,
"plan_length": 14,
"search_time": 0.03,
"total_time": 0.03
},
{
"num_node_expansions": 29,
"plan_length": 27,
"search_time": 0.05,
"total_time": 0.05
},
{
"num_node_expansions": 19,
"plan_length": 14,
"search_time": 0.04,
"total_time": 0.04
},
{
"num_node_expansions": 21,
"plan_length": 18,
"search_time": 0.24,
"total_time": 0.24
},
{
"num_node_expansions": 16,
"plan_length": 14,
"search_time": 0.28,
"total_time": 0.28
},
{
"num_node_expansions": 14,
"plan_length": 12,
"search_time": 0.01,
"total_time": 0.01
},
{
"num_node_expansions": 17,
"plan_length": 14,
"search_time": 0.0,
"total_time": 0.0
},
{
"num_node_expansions": 20,
"plan_length": 17,
"search_time": 0.51,
"total_time": 0.51
},
{
"num_node_expansions": 14,
"plan_length": 12,
"search_time": 0.39,
"total_time": 0.39
}
]
num_timeouts = 0
num_timeouts = 0
num_problems = 172
| 21.706166
| 34
| 0.464826
| 2,590
| 22,531
| 3.710425
| 0.029344
| 0.171696
| 0.304266
| 0.039334
| 0.944537
| 0.943288
| 0.892196
| 0.892196
| 0.892196
| 0.875442
| 0
| 0.11927
| 0.382273
| 22,531
| 1,037
| 35
| 21.727097
| 0.571203
| 0
| 0
| 0.599807
| 0
| 0
| 0.38933
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
398c5c5fc791abc27536b9f67e84e841d5f2d05b
| 55
|
py
|
Python
|
roa/__init__.py
|
MatthewScholefield/read-only-attributes
|
076064482016858fcb05154579579491e1c668e9
|
[
"MIT"
] | 2
|
2020-02-28T22:33:01.000Z
|
2020-02-28T22:33:02.000Z
|
roa/__init__.py
|
MatthewScholefield/read-only-attributes
|
076064482016858fcb05154579579491e1c668e9
|
[
"MIT"
] | 5
|
2020-03-24T18:34:04.000Z
|
2022-02-02T03:38:55.000Z
|
roa/__init__.py
|
MatthewScholefield/read-only-attributes
|
076064482016858fcb05154579579491e1c668e9
|
[
"MIT"
] | 1
|
2021-08-10T09:32:14.000Z
|
2021-08-10T09:32:14.000Z
|
from .read_only_attributes import read_only_attributes
| 27.5
| 54
| 0.909091
| 8
| 55
| 5.75
| 0.625
| 0.347826
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072727
| 55
| 1
| 55
| 55
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
39933fec402aaabb9cc4c8c12a314215ed15ef10
| 1,289
|
py
|
Python
|
dice/dice_arrays.py
|
JGRainbow/Dicify
|
75f897e592be7417fa8f0fd3d7563870fbf9e53d
|
[
"MIT"
] | 1
|
2021-02-10T22:02:04.000Z
|
2021-02-10T22:02:04.000Z
|
dice/dice_arrays.py
|
JGRainbow/Dicify
|
75f897e592be7417fa8f0fd3d7563870fbf9e53d
|
[
"MIT"
] | null | null | null |
dice/dice_arrays.py
|
JGRainbow/Dicify
|
75f897e592be7417fa8f0fd3d7563870fbf9e53d
|
[
"MIT"
] | null | null | null |
import numpy as np
d_one = np.array([
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]
])
d_two = np.array([
[0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0]
])
d_three = np.array([
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]
])
d_four = np.array([
[0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0]
])
d_five = np.array([
[0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0]
])
d_six = np.array([
[0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0]
])
| 20.790323
| 26
| 0.298681
| 322
| 1,289
| 1.177019
| 0.043478
| 1.298153
| 1.733509
| 2.026385
| 0.899736
| 0.899736
| 0.899736
| 0.899736
| 0.899736
| 0.899736
| 0
| 0.370744
| 0.384794
| 1,289
| 62
| 27
| 20.790323
| 0.107188
| 0
| 0
| 0.872727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.018182
| 0
| 0.018182
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
39989884263c2078f2d845c55a0a460c3ff7ecfa
| 102,398
|
py
|
Python
|
NGDAUpdater/NGDAUpdater.py
|
mattCensus/PerlScripts
|
d2643d99abc3f0647ebfbd41f7e5faa704da3e91
|
[
"MIT"
] | null | null | null |
NGDAUpdater/NGDAUpdater.py
|
mattCensus/PerlScripts
|
d2643d99abc3f0647ebfbd41f7e5faa704da3e91
|
[
"MIT"
] | null | null | null |
NGDAUpdater/NGDAUpdater.py
|
mattCensus/PerlScripts
|
d2643d99abc3f0647ebfbd41f7e5faa704da3e91
|
[
"MIT"
] | null | null | null |
import os
import fnmatch
import shutil
import re
import datetime
import time
#import StringIO
import pickle
import sys
import MetadataDateModules
from MetadataDateModules import metadataDateUpdater
from MetadataDateModules import TodaysDate
datesupdated=[]
NewFileArray=[]
NationalPlace=[]
DatesUpdated=0
FileCounter=0
EndDateStamp='no'
# getting today's date using the datetime module
PresentDate = datetime.datetime.now()
PresentDate.day
if PresentDate.hour > 12:
PresentHour = PresentDate.hour -12
AmPm='PM"'
else:
PresentHour =PresentDate.hour
AmPm ='AM'
presentTime= str(PresentHour) + ":" + str(PresentDate.minute) + ":" + str(PresentDate.second) + AmPm
if PresentDate.day < 10:
day = "0" + str(PresentDate.day)
else:
day = PresentDate.day
if PresentDate.month < 10:
month = "0" + str(PresentDate.month)
else:
month = PresentDate.month
PresentDate2 = str(PresentDate.year) + "-" + str(month) + "-" + str(day)
path='C:/Users/mattp/Desktop/WorkFiles/XMLFiles/2020files/ver2/fe_2020/stateNGDA/unsd'
# C:\Users\mattp\Desktop\WorkFiles\XMLFiles\2020files\ver2\fe_2020\stateNGDA\anrc\tl_2020_02_anrc.shp.iso.xml
# C:\Users\mattp\Desktop\WorkFiles\XMLFiles\2020 files\ver2\fe_2020\NationalNGDA
configfiles = [os.path.join(dirpath, f)
for dirpath, dirnames, files in os.walk(path)
for f in files if f.endswith('.xml')]
def DateStampMod(DateStampInd, CurrentDate,ContentIfoInd):
print('Now working on '+ CurrentDate)
if ContentIfoInd == 'yes':
NewFile.write(line)
EndDateStamp = 'No'
return EndDateStamp
elif DateStampInd == 'yes':
NewFile.write('<gco:Date>' + PresentDate2 + '</gco:Date>\n')
NewFile.write('</gmd:dateStamp>')
EndDateStamp= 'No'
return EndDateStamp
else:
NewFile.write('<gmd:dateStamp>')
NewFile.write('<gco:Date>' + PresentDate2 + '</gco:Date>\n')
NewFile.write('</gmd:dateStamp>')
EndDateStamp = 'No'
return EndDateStamp
def RestServiceFiller (Pass):
Theme = Pass
print("Now in the RestServiceFiller Module\n")
print("Now working on (RestServiceFiller):" + Theme+ "\n")
#NewFile.write(' <gmd:alternateTitle>\n')
AppProfile1 =' <gmd:applicationProfile>\n'
AppProfile2 =' <gco:CharacterString>https://www.geoplatform.gov/spec/esri-map-rest</gco:CharacterString>\n'
AppProfile3=' </gmd:applicationProfile>\n'
FinalAppProfile= AppProfile1 + AppProfile2 + AppProfile3
NewFile.write('Them: ' + Theme)
if re.search('AIANNH', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/AIANNHA/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/AIANNHA (MapServer)</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains all the Current American Indian/Alaska Native/Native Hawaiian Areas National layers</gco:CharacterString>\n')
NewFile.write(' </gmd:description>\n')
elif re.search('AITS', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/AIANNHA/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/AIANNHA (MapServer)</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains all the Tribal Subdivision and Oklahoma Tribal Statistical Areas layers</gco:CharacterString>\n')
NewFile.write(' </gmd:description>\n')
elif re.search('BG', theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/Tracts_Blocks/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(
' <gco:CharacterString>TIGERweb/Tracts_Blocks (MapServer)</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(
' <gco:CharacterString>This Rest Service contains the 2010 Census Block Groups layer</gco:CharacterString>\n')
NewFile.write(' </gmd:description>\n')
elif re.search('CBSA', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/CBSA/MapServer/WMSServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/CBSA (MapServer)</gco:CharacterString>')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the Current Metropolitan Statistical Area/Micropolitan Statistical Area (CBSA) Layers</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
elif re.search('Congressional District', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/Legislative/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/Legislative (MapServer)</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the 116th Congressional layer</gco:CharacterString>\n')
NewFile.write(' </gmd:description>\n')
elif re.search('CNECTA', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/CBSA/MapServer/WMSServer</gmd:URL>')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/CBSA (MapServer)</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the Combined New England City and Town Areas layers</gco:CharacterString>\n')
NewFile.write(' </gmd:description>\n')
elif re.search('estate'):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/Places_CouSub_ConCity_SubMCD/MapServer</gmd:URL>')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/Places_CouSub_ConCity_SubMCD (MapServer)</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the estates layers</gco:CharacterString>\n')
NewFile.write(' </gmd:description>\n')
elif re.search('Current County and Equivalent', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/State_County/MapServer</gmd:URL>')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/State_County (MapServer)</gco:CharacterString>')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the Counties and Equivalent Layer</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
elif re.search('CSA', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/CBSA/MapServer/WMSServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/CBSA (MapServer)</gco:CharacterString>')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the Current Combined Statistical Area (CSA)</gco:CharacterString>\n')
NewFile.write(' </gmd:description>\n')
elif re.search('Current Metropolitan Division', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/CBSA/MapServer/WMSServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/CBSA (MapServer)</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the Current Metropolitan Divisions</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
elif re.search('NECTA Division National', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/CBSA/MapServer/WMSServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/CBSA (MapServer)</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the Current New England City and Town Area divisions layer</gco:CharacterString>\n')
NewFile.write(' </gmd:description>\n')
elif re.search('NECTA', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/CBSA/MapServer/WMSServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/CBSA (MapServer)</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the Current New England City and Town Areas layer</gco:CharacterString>\n')
NewFile.write(' </gmd:description>\n')
elif re.search('Current State and Equivalent', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/State_County/MapServer</gmd:URL>')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/State_County (MapServer)</gco:CharacterString>')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the States and Equivalents Layers</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
elif re.search('Current Tribal Block Group', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/TribalTracts/MapServer</gmd:URL>')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/TribalTracts (MapServer)</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the Tribal Block Group Layers</gco:CharacterString>\n')
NewFile.write(' </gmd:description>\n')
elif re.search('Current Tribal Census Tract', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/TribalTracts/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/TribalTracts (MapServer)</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the Tribal Census Tracts Layers</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
elif re.search('Census Urban Area', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/Urban/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/Urban (MapServer)</gco:CharacterString>')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the 2010 Census Urban Area Clusters</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
elif re.search('ZCTA5', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write('<gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/PUMA_TAD_TAZ_UGA_ZCTA/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/PUMA_TAD_TAZ_UGA_ZCTA (MapServer)</gco:CharacterString>')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the Zip Code Tabulation Layer</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
elif re.search('Current County Subdivision',Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write('<gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/Places_CouSub_ConCity_SubMCD/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/Places_CouSub_ConCity_SubMCD (MapServer) (MapServer)</gco:CharacterString>')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the County Sudivisions</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
elif re.search('Current Place',Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write('<gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/Places_CouSub_ConCity_SubMCD/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/Places_CouSub_ConCity_SubMCD (MapServer) (MapServer)</gco:CharacterString>')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the places</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
elif re.search('PUMA',Theme,flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write('<gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/PUMA_TAD_TAZ_UGA_ZCTA/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/PUMA_TAD_TAZ_UGA_ZCTA (MapServer)</gco:CharacterString>')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the 2010 Public Use Microdata Area layer</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
elif re.search('(SLD) Lower Chamber',Theme,flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write('<gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/Legislative/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(
' <gco:CharacterString>TIGERweb/Legislative (MapServer)</gco:CharacterString>')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(
' <gco:CharacterString>This Rest Service contains the state legislative districts - lower chamber (House of Representatives) layer</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
elif re.search('Upper Chamber', Theme, flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write(
'<gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/Legislative/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/Legislative (MapServer)</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the state legislative districts - upper chamber (Senate) layer</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
elif re.search('2010 Census',Theme,flags=0):
NewFile.write(' <gmd:linkage>\n')
NewFile.write('<gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/Tracts_Blocks/MapServer</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/Tracts_Blocks (MapServer)a</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains the 2010 Census Block layers</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
else:
NewFile.write(' <gmd:linkage>\n')
NewFile.write('<gmd:URL>https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb</gmd:URL>\n')
NewFile.write(' </gmd:linkage>\n')
NewFile.write(FinalAppProfile)
NewFile.write(' <gmd:name>\n')
NewFile.write(' <gco:CharacterString>TIGERweb/</gco:CharacterString>\n')
NewFile.write(' </gmd:name>\n')
NewFile.write(' <gmd:description>\n')
NewFile.write(' <gco:CharacterString>This Rest Service contains FILL IN HERE!!!!</gco:CharacterString>')
NewFile.write(' </gmd:description>\n')
def WMSFiller(Pass):
    """Write the WMS online-resource block for a theme to the global NewFile.

    Emits <gmd:linkage>, <gmd:applicationProfile>, <gmd:name> and a
    theme-specific <gmd:description> fragment for the TIGERweb
    tigerWMS_Current service.

    Pass -- theme/title string; the first matching substring in the
            ordered table below selects the description wording. Unknown
            themes get a generic description built from the theme itself.

    NOTE(review): NewFile is an open output file created elsewhere in this
    script; this function only appends XML text to it.
    """
    Theme = Pass
    FinalAppProfile = (' <gmd:applicationProfile>\n'
                       ' <gco:CharacterString>http://opengis.net/spec/wms</gco:CharacterString>\n'
                       ' </gmd:applicationProfile>\n')
    FinalAppName = (' <gmd:name>\n'
                    ' <gco:CharacterString>TIGERweb/tigerWMS_Current (MapServer)</gco:CharacterString>\n'
                    ' </gmd:name>\n')
    FinalCurrentWMS = (' <gmd:linkage>\n'
                       ' <gmd:URL>https://tigerweb.geo.census.gov/arcgis/services/TIGERweb/tigerWMS_Current/MapServer/WMSServer</gmd:URL>\n'
                       ' </gmd:linkage>\n')
    # Ordered (substring, layer phrase) table. Order matters: some patterns
    # are substrings of others (e.g. 'NECTA Division National' must be tried
    # before 'NECTA'). Plain substring tests replace the previous re.search
    # calls; the old '(SLD) Lower Chamber' pattern was a broken regex (the
    # parentheses formed a group), so 'Lower Chamber' is matched instead.
    layers = (
        ('AIANNH', 'Current American Indian/Alaska Native/Native Hawaiian Areas'),
        ('AITS', 'Current American Indian Tribal Subdivision'),
        ('BG', 'Block Groups'),
        ('CBSA', 'the Current Metropolitan Statistical Area/Micropolitan Statistical Area (CBSA) Layers'),
        ('Congressional District', '116th Congressional Districts'),
        ('CNECTA', 'the Combined New England City and Town Areas'),
        ('Current County and Equivalent', 'the Current County and Equivalent'),
        ('CSA', 'the Current Combined Statistical Area (CSA)'),
        ('estates', 'the estates in the Virgin Islands'),
        ('Current Metropolitan Division', 'the Current Metropolitan Division'),
        ('NECTA Division National', 'the New England City and Town Area Divisions'),
        ('NECTA', 'the Current New England City and Town Areas'),
        ('Current State and Equivalent', 'the States and Equivalents'),
        ('Current Tribal Block Group', 'Current Tribal Block Groups'),
        ('Current Tribal Census Tract', 'Current Tribal Census Tracts'),
        ('Census Urban Area', 'the 2010 Census Urban Area Clusters'),
        ('ZCTA5', 'the Zip Code Tabulation Areas'),
        ('Current County Subdivision', 'the County Subdivisions'),
        ('Current Place', 'the places'),
        ('PUMA', 'the 2010 Public Use Microdata Areas'),
        ('Lower Chamber', 'state legislative districts - lower chamber'),
        ('Upper Chamber', 'state legislative districts - upper chamber'),
        ('2010 Census Block', '2010 Census Blocks'),
        ('2020 Census Block', '2020 Census Blocks'),
        # NOTE(review): the original described Current Census Tracts as
        # "2010 Census Tracts"; wording kept pending confirmation.
        ('Current Census Tract', '2010 Census Tracts'),
    )
    for pattern, layer in layers:
        if pattern in Theme:
            break
    else:
        layer = 'the ' + Theme
    NewFile.write(FinalCurrentWMS)
    NewFile.write(FinalAppProfile)
    NewFile.write(FinalAppName)
    NewFile.write(' <gmd:description>\n')
    NewFile.write(' <gco:CharacterString>This web mapping service contains the layer for '
                  + layer
                  + '. This URL is to be used in mapping software like ArcMap.'
                  ' To use this in a web browser, see the OGC Web Mapping'
                  ' Specification.</gco:CharacterString>\n')
    NewFile.write(' </gmd:description>\n')
def ThemeDir(Pass):
    """Return the TIGER/Line download-directory name for a theme string.

    Pass -- theme/title string; matched by substring against the ordered
            table below (first hit wins).

    Returns the lowercase directory name (e.g. 'county', 'tabblock20');
    for an unrecognized theme returns the placeholder
    'Fill_in- for(ThemeDir)' + Theme, exactly as before.
    """
    Theme = Pass
    # Ordered (substring, directory) pairs. Order matters: e.g.
    # 'NECTA Division National' must precede 'NECTA'. Plain substring tests
    # replace re.search: all patterns were literal text, and the old
    # '(SLD) Lower Chamber' entry was a broken regex (parentheses formed a
    # group); 'Lower Chamber' alone covers both spellings and returns the
    # same 'sldl' the old fallback produced.
    mappings = (
        ('AIANNH', 'aiannh'),
        ('aitsn', 'aitsn'),
        ('BG', 'bg'),
        ('CBSA', 'cbsa'),
        ('Congressional District', 'cd116'),
        ('CNECTA', 'cnecta'),
        ('Current County and Equivalent', 'county'),
        ('CSA', 'csa'),
        ('Current Metropolitan Division', 'metdiv'),
        ('NECTA Division National', 'nectadiv'),
        ('NECTA', 'necta'),
        ('Current State and Equivalent', 'state'),
        ('Current Tribal Block Group', 'tbg'),
        ('Current Tribal Census Tract', 'ttract'),
        ('Census Urban Area', 'uac10'),
        ('ZCTA5', 'zcta510'),
        ('Current Block Group', 'bg'),
        ('Current County Subdivision', 'cousub'),
        ('Current Place', 'place'),
        ('PUMA', 'puma10'),
        ('Lower Chamber', 'sldl'),
        ('Upper Chamber', 'sldu'),
        ('2010 Census Block', 'tabblock10'),
        ('2020 Census Block', 'tabblock20'),
        ('Current Census Tract', 'tract'),
        ('Current Unified School Districts Shapefile', 'unsd'),
    )
    for pattern, directory in mappings:
        if pattern in Theme:
            return directory
    return 'Fill_in- for(ThemeDir)' + Theme
def EAFileFiller(Pass):
    """Write the entity-and-attribute (EA) online-resource block to NewFile.

    Emits <gmd:linkage> (URL of the theme's tl_2020_<dir>.ea.iso.xml file),
    <gmd:applicationProfile>, <gmd:name> and a theme-specific
    <gmd:description> fragment.

    Pass -- theme/title string; the first matching substring in the ordered
            table below selects the shapefile named in the description. An
            unrecognized theme gets the raw theme string as description,
            exactly as before.

    Fixes over the original: the linkage URL no longer contains a double
    slash (the base already ended in '/'), the Upper Chamber branch no
    longer claims to describe "PUMA" files, and the 2020 Census Block
    branch says Tabblock20 rather than Tabblock10.

    NOTE(review): NewFile is an open output file created elsewhere in this
    script; this function only appends XML text to it.
    """
    Theme = Pass
    FinalAppProfile = (' <gmd:applicationProfile>\n'
                       ' <gco:CharacterString>https</gco:CharacterString>\n'
                       ' </gmd:applicationProfile>\n')
    FinalAppName = (' <gmd:name>\n'
                    ' <gco:CharacterString>Entity and Attribute File</gco:CharacterString>\n'
                    ' </gmd:name>\n')
    # Ordered (substring, shapefile phrase) table; order matters because
    # some patterns are substrings of others (e.g. 'NECTA Division National'
    # before 'NECTA'). Substring tests replace the old literal re.search calls.
    shapefiles = (
        ('AIANNH', 'the Current American Indian/Alaska Native/Native Hawaiian Areas National (AIANNH) National Shapefile'),
        ('AITS', 'the Current American Indian Tribal Subdivision (AITS) National Shapefile'),
        ('BG', 'the Current Block Groups State Shapefile'),
        ('Current Block Group', 'the Current Block Groups State Shapefile'),
        ('CBSA', 'the Current Metropolitan Statistical Area/Micropolitan Statistical Area (CBSA) National Shapefile'),
        ('estates', 'the Current estates state Shapefile'),
        ('Congressional District', 'the 116th Congressional District National Shapefile'),
        ('CNECTA', 'the Current Combined New England City and Town Area (CNECTA) National Shapefile'),
        ('Current County and Equivalent', 'the Current County and Equivalent National Shapefile'),
        ('CSA', 'the Current Combined Statistical Area (CSA) National Shapefile'),
        ('Current Metropolitan Division', 'the Current Metropolitan Division National Shapefile'),
        ('NECTA Division National', 'the Current NECTA Division National Shapefile'),
        ('NECTA', 'the Current New England City and Town Area (NECTA) National Shapefile'),
        ('Current State and Equivalent', 'the Current State and Equivalent National Shapefile'),
        ('Current Tribal Block Group', 'the Current Tribal Block Group National Shapefile'),
        ('Current Tribal Census Tract', 'the Current Tribal Census Tract National Shapefile'),
        ('Census Urban Area', 'the Census Urban Area National Shapefile'),
        ('ZCTA5', 'the Census 5-Digit ZIP Code Tabulation Area (ZCTA5) National Shapefile'),
        ('Current County Subdivision', 'the Current County Subdivision State Shapefiles'),
        ('Current Place', 'the place State Shapefiles'),
        ('PUMA', 'the PUMA State Shapefiles'),
        ('Upper Chamber', 'the State Legislative District Upper Chamber State Shapefiles'),
        ('2010 Census Block', 'the Tabblock10 State Shapefiles'),
        ('2020 Census Block', 'the Tabblock20 State Shapefiles'),
        ('Current Census Tract', 'the Tract State Shapefiles'),
        ('Current Unified School Districts Shapefile', 'the Unified School Districts State Shapefiles'),
    )
    for pattern, shapefile in shapefiles:
        if pattern in Theme:
            description = (' <gco:CharacterString>This Entity and attribute contains the attributes for '
                           + shapefile + '</gco:CharacterString>\n')
            break
    else:
        # Unknown theme: fall back to echoing the theme string itself.
        description = ' <gco:CharacterString>' + Theme + '</gco:CharacterString>\n'
    # Linkage URL is identical for every theme; build it once.
    EATheme = str(ThemeDir(Theme))
    eafileName = 'tl_2020_' + EATheme + '.ea.iso.xml'
    eaUrL = (' <gmd:URL>https://www2.census.gov/geo/tiger/TIGER2020/'
             + EATheme + '/' + eafileName + '</gmd:URL>\n')
    NewFile.write(' <gmd:linkage>\n')
    NewFile.write(eaUrL)
    NewFile.write(' </gmd:linkage>\n')
    NewFile.write(FinalAppProfile)
    NewFile.write(FinalAppName)
    NewFile.write(' <gmd:description>\n')
    NewFile.write(description)
    NewFile.write(' </gmd:description>\n')
def eaUrl(Pass):
    """Build the URL of the entity-and-attribute (EA) ISO metadata file for a theme.

    Pass: theme string; resolved to a directory name via ThemeDir().
    Returns the full URL followed by a newline (callers embed it directly
    in an XML text node).
    """
    Theme = Pass
    EATheme = str(ThemeDir(Theme))
    # No trailing slash on the base: the path segments below are joined with
    # explicit '/' separators.  The original base ended in '/' and produced a
    # spurious double slash ('TIGERline//Tiger2020') in the final URL.
    FirstPartUrl = 'https://meta.geo.census.gov/data/existing/decennial/GEO/GPMB/TIGERline'
    # NOTE(review): 'Tiger2020' (mixed case) differs from the 'TIGER2020'
    # casing used elsewhere in this script -- confirm against the server.
    YearDir = 'Tiger2020'
    EAFileName = 'tl_2020_' + EATheme + '.shp.ea.iso.xml'
    FinalEaFile = FirstPartUrl + '/' + YearDir + '/' + EATheme + '/' + EAFileName + '\n'
    return FinalEaFile
def eaTitle(Pass):
    """Return the gmd:title XML fragment for the feature-catalogue citation.

    Pass: the theme string extracted from the source metadata title.  The
    first matching pattern wins, so more specific patterns must stay ahead
    of their substrings (e.g. 'NECTA Division National' before 'NECTA').
    Every branch returns a <gco:CharacterString> element terminated by a
    newline; the fallback echoes the theme itself.

    Fix: the original 'BG' branch returned its title WITHOUT the
    <gco:CharacterString> wrapper, unlike every other branch; it is now
    wrapped consistently.
    """
    Theme = Pass
    # Ordered (pattern, title) table -- searched top to bottom exactly like
    # the original if/elif chain, so match precedence is preserved.
    TITLES = [
        ('AIANNH', 'Feature Catalog for the 2020 Current American Indian/Alaska Native/Native Hawaiian Areas National (AIANNH) National Shapefile'),
        ('AITS', 'Feature Catalog for the 2020 Current American Indian Tribal Subdivision (AITS) National Shapefile'),
        ('BG', 'Feature Catalog for the 2020 TIGER/Line Shapefile Current Block Group State-based Shapefile'),
        ('CBSA', 'Feature Catalog for the 2020 Current Metropolitan Statistical Area/Micropolitan Statistical Area (CBSA) National Shapefile'),
        ('Congressional District', 'Feature Catalog for the 2020 116th Congressional District National Shapefile'),
        ('CNECTA', 'Feature Catalog for the Current 2020 Combined New England City and Town Area (CNECTA) National Shapefile'),
        ('Current County and Equivalent', 'Feature Catalog for the 2020 Current County and Equivalent National Shapefile'),
        ('CSA', 'Feature Catalog for the 2020 Current Combined Statistical Area (CSA) National Shapefile'),
        ('Current Metropolitan Division', 'Feature Catalog for the 2020 Current Metropolitan Division National Shapefile'),
        ('NECTA Division National', 'Feature Catalog for the 2020 Current NECTA Division National Shapefile'),
        ('NECTA', 'Feature Catalog for the 2020 Current New England City and Town Area (NECTA) National Shapefile'),
        ('Current State and Equivalent', 'Feature Catalog for the 2020 Current State and Equivalent National Shapefile'),
        ('Current Tribal Block Group', 'Feature Catalog for the 2020 Current Tribal Block Group National Shapefile'),
        ('Current Tribal Census Tract', 'Feature Catalog for the 2020 Current Tribal Census Tract National Shapefile'),
        ('Census Urban Area', 'Feature Catalog for the 2010 Census Urban Area National Shapefile'),
        ('ZCTA5', 'Feature Catalog for the 2020 2010 Census 5-Digit ZIP Code Tabulation Area (ZCTA5) National Shapefile'),
        ('estate', 'Feature Catalog for the 2020 TIGER/Line Shapefile Current Estate State-based Shapefile (U.S. Virgin Islands only)'),
        ('Current Block Group', 'Feature Catalog for the 2020 TIGER/Line Shapefile Current Block Group State-based Shapefile'),
        ('Current County Subdivision', 'Feature Catalog for the 2020 TIGER/Line Shapefile Current County Subdivision State-based Shapefile'),
        ('Current Place', 'Feature Catalog for the 2020 TIGER/Line Shapefile Current Place State-based Shapefile'),
        ('PUMA', 'Feature Catalog for the 2020 TIGER/Line Shapefile Public Use Microdata Area (PUMA) State-based Shapefile'),
        ('Lower Chamber', 'Feature Catalog for the 2020 TIGER/Line Shapefile Current State Legislative District (SLD) Lower Chamber State-based Shapefile'),
        ('Upper Chamber', 'Feature Catalog for the 2020 TIGER/Line Shapefile Current State Legislative District (SLD) Upper Chamber State-based Shapefile'),
        ('2010 Census Block', 'Feature Catalog for the 2020 TIGER/Line Shapefile 2010 Census Block state based Shapefile'),
        ('2020 Census Block', 'Feature Catalog for the 2020 TIGER/Line Shapefile 2020 Census Block state based Shapefile'),
        ('Current Census Tract', 'Feature Catalog for the 2020 TIGER/Line Shapefile 2020 tracts state based Shapefile'),
        ('unsd', 'Feature Catalog for the 2020 TIGER/Line Shapefile Current Unified School Districts Shapefile State-based'),
        ('Current Unified School Districts Shapefile', 'Feature Catalog for the 2020 TIGER/Line Shapefile Current Unified School Districts Shapefile State-based'),
    ]
    for pattern, title in TITLES:
        if re.search(pattern, Theme, flags=0):
            return '<gco:CharacterString>' + title + '</gco:CharacterString>\n'
    return '<gco:CharacterString> The theme is ' + Theme + '(eatitle)</gco:CharacterString>\n'
# Abort early unless the configured working directory actually exists.
if not os.path.exists(path):
    print("Could not find " + path + ". Please make sure the path is correct")
    sys.exit(1)
print("The " + path + " directory exists")
def keywordCounter(input):
    """Count <gmd:keyword> occurrences in the file named *input*.

    Returns the raw count minus 3 (presumably to exclude fixed boilerplate
    keyword entries -- confirm against callers).

    Fix: the original opened the file and never closed it; ``with`` now
    guarantees the handle is released.
    """
    KeywordModCounter = 0
    with open(input, "r") as ReadFile:
        for line in ReadFile:
            if re.search('<gmd:keyword>', line, flags=0):
                KeywordModCounter += 1
    FinalKeyword = KeywordModCounter - 3
    return FinalKeyword
# Main driver: for every source ISO-metadata XML file, write a cleaned-up
# copy named <name>_corrected_<ext> with URLs, keywords, dates, edition and
# citation blocks repaired.  String flags ('yes'/'no'/'off') steer the
# per-line state machine below.  Indentation reconstructed from syntax;
# NOTE(review): StateEntityCounter is used but not reset per file here --
# presumably initialized earlier in the script; confirm.
for file in configfiles:
    # Per-file counters and state flags.
    transferOptionsCounter=0
    linkageCounter=0
    editionCounter=0
    FileCounter += 1
    gmdDateCounter=0
    KeywordModCounter=0
    KeywordGood = 'yes'
    keywordCounter =0
    NationalPlace.clear()
    nationalPlaceInd = 'no'
    keywordind = 'no'
    InCitInd = 'no'
    TitleEndCharacterString ='no'
    DescriptiveKeywordsInd='off'
    MafTigerInd = 'no'
    # Split the file name on the first '.' to build the output name.
    dotLocation = file.find(".")
    preDot = file[0:dotLocation]
    postDot = file[dotLocation:]
    ContentIfoInd = 'no'
    FirstTitle = 'Yes'
    endTitleCounter=0
    datasetUriind = 'no'
    OutFile = preDot + "_corrected_" + postDot
    # First pass: count <gmd:keyword> entries to locate the place-keyword
    # window (indices PrePlace..StateKeywords) used when filtering keywords.
    ReadFileA = open(file, "r")
    for line in ReadFileA:
        if re.search('<gmd:keyword>', line, flags=0):
            KeywordModCounter += 1
        else:
            continue
    PrePlace = KeywordModCounter -6
    StateKeywords= PrePlace +3
    ReadFileA.close()
    #finalKeyword=int(keywordCounter(file))
    print (' PrePlace' + str( PrePlace))
    print ('StateKeywords' + str(StateKeywords))
    #print("preDot: " + preDot)
    #print("PostDot: " + postDot)
    #print("Outfile" + OutFile)
    #print("File: " + file)
    print("Now Working on: " + file)
    #print ("Outfile=" + OutFile)
    # Second pass: stream the original file and write the corrected copy.
    ReadFile = open(file, "r")
    with open(OutFile, "w") as NewFile:
        for line in ReadFile:
            if re.search('gmd:linkage',line,flags=0):
                linkageCounter+=1
                #NewFile.write('<!-- if #1 -->\n')
                if linkageCounter == 1:
                    # Arm the URL-rewrite branch for the first linkage block.
                    LinkageInd='yes'
                    NewFile.write(line)
                else:
                    NewFile.write(line)
            elif re.search('<gco:CharacterString>MAF/TIGER</gco:CharacterString>', line, flags=0):
                NewFile.write(line)
                MafTigerInd = 'yes'
                NewFile.write('<!-- MafTigerind: ' + MafTigerInd + ' -->\n')
            elif re.search('gmd:URL',line, flags=0):
                #NewFile.write('<!-- if #2 -->\n')
                print("---------------------------------\n")
                #print ("LinkageInd: " + LinkageInd + "\n")
                # NOTE(review): LinkageInd is only assigned once a
                # gmd:linkage line has been seen; a gmd:URL appearing first
                # would raise NameError -- confirm input files guarantee order.
                if LinkageInd =="yes":
                    #NewFile.write(line)
                    # Extract the zip file name between '/tl' and '</gmd:URL>'
                    # and rebuild the download URL under the TIGER2020 tree.
                    lastSlash=line.rfind("/tl")+1
                    lastEndtag=line.find("</gmd:URL>")
                    ZipFileName=line[lastSlash: lastEndtag]
                    ThemeURL=str(ThemeDir( mainTheme))
                    '''
                 # NewFile.write('<!-- ZipFileName ' + ZipFileName + '-->')
                    #NewFile.write('<!-- ThemeURL' + ThemeURL + '-->')
                    '''
                    FinalZip=' <gmd:URL>https://www2.census.gov/geo/tiger/TIGER2020/'+ ThemeURL +'/' + ZipFileName + '</gmd:URL>\n'
                    LinkageInd="No"
                    # print('In the LinkageId section\n')
                    #print(line)
                    #print ('ZipFileName: ' + ZipFileName)
                    LinkageInd='No'
                    #NewFile.write('<!--- What is going on here? -->')
                    NewFile.write(FinalZip)
                else:
                    NewFile.write(line)
            elif re.search('<gco:CharacterString>.shp.iso.xml',line, flags=0):
                #NewFile.write('<!-- if #3 -->\n')
                NewFile.write(' <gco:CharacterString>' + file + '</gco:CharacterString>')
            elif re.search ('codeListValue=""',line,flags=0):
                #NewFile.write('<!-- if #4 -->\n')
                NewFile.write(' codeListValue="dataset"/>')
            elif re.search('<gmd:MD_GeometricObjectTypeCode',line,flags=0):
                #NewFile.write('<!-- if #5 -->\n')
                # Force the geometric object type code to "complex".
                lastCarrot=line.find('>')-1
                maipart=line[0:lastCarrot]
                GMTC=maipart+'" codeListValue="complex">complex</gmd:MD_GeometricObjectTypeCode>'
                NewFile.write(GMTC)
            elif re.search('</gmd:featureTypes>',line,flags=0):
                #NewFile.write('<!-- if #6 -->\n')
                # Append a full featureCatalogueCitation block after the
                # feature types, pointing at the theme's EA metadata file.
                NewFile.write(line)
                NewFile.write(' <gmd:featureCatalogueCitation>')
                NewFile.write(' <gmd:CI_Citation>\n')
                NewFile.write(' <gmd:title>\n')
                NewFile.write(str(eaTitle(mainTheme)))
                NewFile.write(' </gmd:title>\n')
                NewFile.write(' <gmd:date>\n')
                NewFile.write(' <gmd:CI_Date>\n')
                NewFile.write(' <gmd:date>\n')
                NewFile.write(' <gco:Date>2020</gco:Date>\n')
                NewFile.write(' </gmd:date>\n')
                NewFile.write(' <gmd:dateType>\n')
                NewFile.write(' <gmd:CI_DateTypeCode codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#CI_DateTypeCode" codeListValue="publication" codeSpace="002"/>\n')
                NewFile.write(' </gmd:dateType>\n')
                NewFile.write(' </gmd:CI_Date>\n')
                NewFile.write(' </gmd:date>\n')
                NewFile.write(' <gmd:citedResponsibleParty xlink:href="https://www.ngdc.noaa.gov/docucomp/1df27e57-4768-42de-909b-52f530601fba" xlink:title="U.S Department of Commerce, U.S Census Bureau, Geography Division (distributor)"/>')
                NewFile.write(' <gmd:otherCitationDetails>\n')
                EAFile = str(eaUrl(mainTheme))
                NewFile.write(' <gco:CharacterString>' + EAFile + '</gco:CharacterString>\n')
                NewFile.write(' </gmd:otherCitationDetails>\n')
                NewFile.write(' </gmd:CI_Citation>\n')
                NewFile.write(' </gmd:featureCatalogueCitation>\n')
            elif re.search('</gmd:protocol>',line,flags=0):
                #NewFile.write('<!-- if #7 -->\n')
                # First transfer option only: add applicationProfile / name /
                # description for the zip download.
                if transferOptionsCounter == 0:
                    NewFile.write(line)
                    NewFile.write(' <gmd:applicationProfile>\n')
                    NewFile.write(' <gco:CharacterString>ZIP</gco:CharacterString>\n')
                    NewFile.write('</gmd:applicationProfile>\n')
                    NewFile.write('<gmd:name>\n')
                    NewFile.write('<gco:CharacterString>'+ ZipFileName + '</gco:CharacterString>\n')
                    NewFile.write(' </gmd:name>\n')
                    NewFile.write('<gmd:description>\n')
                    NewFile.write(' <gco:CharacterString> This zip file contains the ' + file + ' shapefile </gco:CharacterString>\n')
                    NewFile.write('</gmd:description>\n')
                else:
                    NewFile.write(line)
            elif re.search('<gco:CharacterString>TIGER/Line Shapefile',line,flags=0):
                #NewFile.write('<!-- if #8 -->\n')
                # First title only: derive mainTheme / Geography from the
                # title text, write a rebuilt primary title plus the original
                # as an alternateTitle.
                if FirstTitle == 'Yes':
                    FirstTitle ='No'
                    TitleEndCharacterString='yes'
                    mainTitle=line
                    lastComma=line.rfind(',')+1
                    if re.search('</gco:CharacterString>',line,flags=0):
                        closingTagLoc=line.find('</')
                        mainTheme = line[lastComma:closingTagLoc]
                    else:
                        mainTheme=line[lastComma:]
                    # Column 68 is assumed to be where the geography text
                    # starts in the title line -- fragile; confirm.
                    Geography=line[68:lastComma-1]
                    #print ('Geography:' + Geography)
                    PrimaryAlternateTitle = '<gco:CharacterString>TIGER/Line Shapefile, Current, ' + Geography + mainTheme + '</gco:CharacterString>\n'
                    NewFile.write(PrimaryAlternateTitle)
                    #NewFile.write('<!-- Check 1 -->\n')
                    NewFile.write('</gmd:title>\n')
                    NewFile.write(' <gmd:alternateTitle>\n')
                    if re.search('</gco:CharacterString>',mainTitle,flags=0):
                        NewFile.write(mainTitle)
                    else:
                        NewFile.write(mainTitle+ '</gco:CharacterString>')
                    NewFile.write(' </gmd:alternateTitle>\n')
                    FirstAlternativeTitle(mainTheme)
                else:
                    NewFile.write(line)
            elif re.search('</gmd:transferOptions>', line, flags=0):
                #NewFile.write('<!-- if #9 -->\n')
                #print('In the transfer options section')
                #print('transferOptionsCounter' + str(transferOptionsCounter: ) + "\n")
                #NewFile.write('<!-- transferOptionsCounter ' + str(transferOptionsCounter) +'-->')
                # After the second transfer option, append WMS (search) and
                # EA-file (download) online-resource blocks.
                if transferOptionsCounter == 1:
                    NewFile.write(line)
                    NewFile.write(' <gmd:transferOptions>\n')
                    NewFile.write(' <gmd:MD_DigitalTransferOptions>\n')
                    NewFile.write(' <gmd:onLine>\n')
                    NewFile.write(' <gmd:CI_OnlineResource>\n')
                    WMSFiller(mainTheme)
                    NewFile.write(' <gmd:function>\n')
                    NewFile.write(' <gmd:CI_OnLineFunctionCode codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#CI_OnlineFunctionCode"\n')
                    NewFile.write(' codeListValue="search">search\n')
                    NewFile.write(' </gmd:CI_OnLineFunctionCode>\n')
                    NewFile.write(' </gmd:function>\n')
                    NewFile.write(' </gmd:CI_OnlineResource>\n')
                    NewFile.write(' </gmd:onLine>\n')
                    NewFile.write(' </gmd:MD_DigitalTransferOptions>\n')
                    NewFile.write(' </gmd:transferOptions>\n')
                    transferOptionsCounter += 1
                    NewFile.write(' <gmd:transferOptions>\n')
                    NewFile.write(' <gmd:MD_DigitalTransferOptions>\n')
                    NewFile.write(' <gmd:onLine>\n')
                    NewFile.write(' <gmd:CI_OnlineResource>\n')
                    EAFileFiller(mainTheme)
                    NewFile.write(' <gmd:function>\n')
                    NewFile.write(
                        ' <gmd:CI_OnLineFunctionCode codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#CI_OnlineFunctionCode"\n')
                    NewFile.write(' codeListValue="download">download\n')
                    NewFile.write(' </gmd:CI_OnLineFunctionCode>\n')
                    NewFile.write(' </gmd:function>\n')
                    NewFile.write(' </gmd:CI_OnlineResource>\n')
                    NewFile.write(' </gmd:onLine>\n')
                    NewFile.write(' </gmd:MD_DigitalTransferOptions>\n')
                    NewFile.write(' </gmd:transferOptions>\n')
                else:
                    NewFile.write(line)
                    transferOptionsCounter += 1
            elif re.search('</gmd:title>',line,flags=0):
                #NewFile.write('<!-- if #10 -->\n')
                # Drop the first closing title tag (already emitted above).
                if endTitleCounter ==0:
                    endTitleCounter+=1
                else:
                    NewFile.write(line)
            elif re.search('</gco:CharacterString>',line,flags=0):
                #NewFile.write('<!-- if #11 TitleEndCharacterString:' + TitleEndCharacterString + '\n nationalPlaceInd: ' + nationalPlaceInd +'-->\n')
                if TitleEndCharacterString == 'yes':
                    NewFile.write('<!-- 11az -->')
                    TitleEndCharacterString='no'
                    continue
                elif KeywordGood == 'no':
                    NewFile.write('<!-- 11a -->')
                    if keywordind== 'yes':
                        NewFile.write('<!-- if #11b -->\n')
                        print(line)
                        #print('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')
                        #print('keywordCounter: ' + str(keywordCounter))
                        #print('KeywordGood = ' + KeywordGood)
                        #print('BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB')
                        # NOTE(review): inside this KeywordGood == 'no' branch
                        # the inner KeywordGood == 'yes' test can never be
                        # true, so control always reaches the else that
                        # collects the keyword into NationalPlace.
                        if KeywordGood == 'yes':
                            NewFile.write('<!-- 11c -->')
                            if re.search('State or Equivalent Entity',line,flags=0):
                                StateEntityCounter+=1
                                if StateEntityCounter >1:
                                    continue
                                else:
                                    NewFile.write(line)
                                    print('Printing the Keyword!!!!!!!!!!!!!!')
                                    keywordind='no'
                            else:
                                NewFile.write(line)
                                print('Printing the Keyword!!!!!!!!!!!!!!')
                                keywordind = 'no'
                        else:
                            #print('Now writing' + line + 'to the NationalPlace ')
                            #NewFile.write('<!-- Now writing' + line + 'to the NationalPlace ')
                            NationalPlace.append(line)
                            keywordind = 'no'
                    elif re.search('<gco:CharacterString>MAF/TIGER</gco:CharacterString>', line, flags=0):
                        NewFile.write(line)
                        MafTigerInd = 'yes'
                        #NewFile.write('<!-- MafTigerind: ' + MafTigerInd + ' -->\n')
                    else:
                        continue
                elif datasetUriind =='yes':
                    # Rewrite the dataset URI scheme to https.
                    doubleSlashLoc=line.find('//')
                    postSlash=line[doubleSlashLoc:]
                    newUrl='<gco:CharacterString>https:' + postSlash
                    NewFile.write(newUrl)
                    datasetUriind = 'no'
                elif InCitInd == 'yes':
                    InCitInd ='no'
                    continue
                else:
                    NewFile.write(line)
                '''
                #NewFile.write('<!-- if #11a -->\n')
                print(line)
                print('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')
                print ('keywordCounter: ' + str(keywordCounter))
                print ('KeywordGood = ' + KeywordGood)
                print('BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB')
                if KeywordGood =='yes':
                    NewFile.write(line)
                    print('Printing the Keyword!!!!!!!!!!!!!!')
                else:
                    print('Now writing' + line + 'to the NationalPlace ')
                    NationalPlace.append(line)
                '''
            elif re.search(' <gmd:edition>',line,flags=0):
                #NewFile.write('<!-- if #12 -->\n')
                NewFile.write(line)
                NewFile.write(' <gco:CharacterString>2020</gco:CharacterString>')
            elif re.search('http://www2.census.gov/geo/tiger/TIGER2020',line,flags=0):
                #NewFile.write('<!-- if #13 -->\n')
                # NOTE(review): CorrectedHttp is built but never written, so
                # matching lines are silently dropped from the output --
                # possibly a missing NewFile.write(CorrectedHttp); confirm.
                semiLoc=line.rfind(':')
                lastpart=line[semiLoc:]
                CorrectedHttp=' <gco:CharacterString>https' + lastpart
                #NewFile.write('<!-- string corrected -->')
            elif re.search(' </gmd:edition>',line, flags=0):
                #NewFile.write('<!-- if #14 -->\n')
                editionCounter+=1
                #print('editionCounter: ' + str(editionCounter))
                # First edition close only: append an identifier block.
                if editionCounter ==1:
                    NewFile.write(line)
                    NewFile.write(' <gmd:identifier>\n')
                    NewFile.write(' <gmd:MD_Identifier>\n')
                    NewFile.write(' <gmd:code>\n')
                    NewFile.write(' <gco:CharacterString>https://www.census.gov</gco:CharacterString>\n')
                    NewFile.write(' </gmd:code>\n')
                    NewFile.write(' </gmd:MD_Identifier>\n')
                    NewFile.write(' </gmd:identifier>\n')
                else:
                    NewFile.write(line)
            elif re.search('<gmd:extent/>',line, flags=0):
                #NewFile.write('<!-- if #15 -->\n')
                # Replace an empty extent element with the fixed time period.
                NewFile.write(' <gmd:extent>\n')
                NewFile.write(' <gml:TimePeriod gml:id="timePeriod">\n')
                NewFile.write(' <gml:beginPosition>2019-06</gml:beginPosition>\n')
                NewFile.write(' <gml:endPosition>2020-05</gml:endPosition>\n')
                NewFile.write(' </gml:TimePeriod>\n')
                NewFile.write(' </gmd:extent>\n')
            elif re.search('<gml:beginPosition/>',line,flags=0):
                #NewFile.write('<!-- if #16 -->\n')
                NewFile.write(' <gml:beginPosition>2019-06</gml:beginPosition>\n')
            elif re.search('<gml:endPosition/>',line,flags=0):
                #NewFile.write('<!-- if #17 -->\n')
                NewFile.write(' <gml:endPosition>2020-05</gml:endPosition>\n')
            elif re.search('<gmd:keyword>',line, flags=0):
                #NewFile.write('<!-- if #18 -->\n')
                #NewFile.write('<!-- if #18' + line + '-->\n')
                keywordCounter+=1
                #print('00000000000000000000000000000000000000000000000000000')
                #print('keywordCounter: ' + str(keywordCounter))
                #print(line)
                keywordind='yes'
                # Keep keywords before the place window, drop those inside
                # it (they are re-emitted later from NationalPlace), and keep
                # state keywords except duplicate 'State or Equivalent Entity'.
                if keywordCounter <=PrePlace:
                    KeywordGood='yes'
                    NewFile.write(line)
                    DescriptiveKeywordsInd='off'
                elif keywordCounter<StateKeywords:
                    KeywordGood = 'no'
                    DescriptiveKeywordsInd='off'
                else:
                    if re.search('State or Equivalent Entity',line,flags=0):
                        continue
                    else:
                        NewFile.write(line)
                        KeywordGood = 'yes'
                        DescriptiveKeywordsInd = 'on'
            elif re.search(' <gco:CharacterString>',line,flags=0):
                #NewFile.write('<!-- if #19 -->\n')
                print(line)
                #print('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')
                #print ('keywordCounter: ' + str(keywordCounter))
                #print ('KeywordGood = ' + KeywordGood)
                #print('BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB')
                if re.search('>ANSI INCITS 38:2009', line, flags=0):
                    #newLine = line + '</gco:CharacterString>'
                    #(ANSI INCITS 38-2009), Federal Information Processing Series (FIPS) – States/State Equivalents'
                    NewFile.write('<gco:CharacterString>National Standard Codes (ANSI INCITS 38-2009)</gco:CharacterString>' )
                elif re.search('<gco:CharacterString>MAF/TIGER</gco:CharacterString>', line, flags=0):
                    NewFile.write(line)
                    MafTigerInd='yes'
                    NewFile.write('<!-- MafTigerind: ' + MafTigerInd+ ' -->\n')
                else:
                    NewFile.write(line)
            elif re.search('</gmd:keyword>',line,flags=0):
                #NewFile.write('<!-- if #20 -->\n')
                #print('Ending the keyword tag')
                if KeywordGood =='no':
                    continue
                else:
                    NewFile.write(line)
            elif re.search('</gmd:descriptiveKeywords>',line,flags=0):
                #NewFile.write('<!-- if #21 -->\n')
                NewFile.write('<!-- Working with gmd:descriptiveKeywords DescriptiveKeywordsInd: ' + DescriptiveKeywordsInd + '-->')
                # When the place-keyword window just closed, re-emit the
                # collected NationalPlace keywords with an ISO 3166 thesaurus.
                if DescriptiveKeywordsInd=='on':
                    NewFile.write(line)
                    NewFile.write(' <gmd:descriptiveKeywords>')
                    NewFile.write(' <gmd:MD_Keywords>\n')
                    for item in NationalPlace:
                        #NewFile.write('<!-- item: ' + item + " -->")
                        if item != 'State or Equivalent Entity':
                            NewFile.write(' <gmd:keyword>\n')
                            NewFile.write(item + '\n')
                            NewFile.write(' </gmd:keyword>\n')
                    NewFile.write(' <gmd:type>\n')
                    NewFile.write(' <gmd:MD_KeywordTypeCode codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#MD_KeywordTypeCode"\n')
                    NewFile.write(' codeListValue="place"/>\n')
                    NewFile.write(' </gmd:type>\n')
                    NewFile.write(' <gmd:thesaurusName>\n')
                    NewFile.write(' <gmd:CI_Citation>\n')
                    NewFile.write(' <gmd:title>\n')
                    NewFile.write(' <gco:CharacterString>ISO 3166 Codes for the representation of names of countries and their subdivisions</gco:CharacterString>\n')
                    NewFile.write(' </gmd:title>\n')
                    NewFile.write(' <gmd:date gco:nilReason="unknown"/>\n')
                    NewFile.write(' </gmd:CI_Citation>\n')
                    NewFile.write(' </gmd:thesaurusName>\n')
                    NewFile.write(' </gmd:MD_Keywords>\n')
                    NewFile.write(' </gmd:descriptiveKeywords>\n')
                    DescriptiveKeywordsInd='off'
                else:
                    NewFile.write(line)
            elif re.search ('<gmd:dataSetURI>',line,flags=0):
                datasetUriind='yes'
                NewFile.write(line)
            elif re.search('ANSI INCITS 31:2009',line,flags=0):
                #NewFile.write('<!--ANSI INCITS 31:2009 -->\n')
                InCitInd='yes'
                continue
            elif re.search ('(Formerly FIPS 8-6)',line, flags=0):
                #NewFile.write('<!--ANSI INCITS 31:2009 -->\n')
                continue
            # The remaining branches suppress the original MAF/TIGER citation
            # date block (replaced by a nilReason attribute) when
            # MafTigerInd is set, and copy lines through otherwise.
            elif re.search ('<gmd:date>',line,flags=0):
                if re.search ('<gmd:date gco:nilReason="unknown"/>',line, flags=0):
                    NewFile.write(line)
                elif MafTigerInd =='yes':
                    NewFile.write(' <gmd:date gco:nilReason="unknown"/>')
                else:
                    NewFile.write(line)
            elif re.search('<gmd:CI_Date>',line,flags=0):
                if MafTigerInd =='yes':
                    continue
                else:
                    NewFile.write(line)
            elif re.search('<gco:Date>Unpublished material</gco:Date>',line,flags=0):
                if MafTigerInd =='yes':
                    continue
                else:
                    NewFile.write(line)
            elif re.search('</gmd:date>',line,flags=0):
                if MafTigerInd == 'yes':
                    gmdDateCounter+=1
                    if gmdDateCounter ==1:
                        continue
                    else:
                        MafTigerInd='no'
                else:
                    NewFile.write(line)
            elif re.search('<gmd:dateType>',line,flags=0):
                if MafTigerInd == 'yes':
                    continue
                else:
                    NewFile.write(line)
            elif re.search(' <gmd:CI_DateTypeCode',line, flags=0):
                if MafTigerInd =='yes':
                    continue
                else:
                    NewFile.write(line)
            elif re.search('codeListValue="publication date"',line, flags=0):
                if MafTigerInd == 'yes':
                    continue
                else:
                    NewFile.write(line)
            elif re.search('</gmd:CI_DateTypeCode>',line, flags=0):
                if MafTigerInd == 'yes':
                    continue
                else:
                    NewFile.write(line)
            elif re.search('</gmd:dateType>',line, flags=0):
                if MafTigerInd == 'yes':
                    continue
                else:
                    NewFile.write(line)
            elif re.search('</gmd:CI_Date>',line, flags=0):
                if MafTigerInd == 'yes':
                    continue
                else:
                    NewFile.write(line)
            else:
                NewFile.write(line)
                #print(line)
        NewFileArray.append(OutFile)
    NewFile.close()
print("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\n")
# Copy each "_corrected_" output file back over its original name.
for correctedName in NewFileArray:
    markerLoc = correctedName.find('_corrected')
    # Remove the literal "_corrected_" (11 characters) to recover the
    # source file name the copy was derived from.
    destName = correctedName[:markerLoc] + correctedName[markerLoc + 11:]
    shutil.copyfile(correctedName, destName)
# Cleanup of the intermediate *_corrected_* files is intentionally disabled.
print("Done! " + str(FileCounter) + " files have been processed at " + presentTime + "!")
sys.exit(1)
| 62.9754
| 344
| 0.545958
| 9,932
| 102,398
| 5.614378
| 0.051349
| 0.173236
| 0.109752
| 0.053657
| 0.862254
| 0.846616
| 0.82888
| 0.819357
| 0.800796
| 0.779599
| 0
| 0.014985
| 0.332008
| 102,398
| 1,626
| 345
| 62.9754
| 0.800222
| 0.030118
| 0
| 0.706667
| 0
| 0.087018
| 0.507298
| 0.110874
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005614
| false
| 0.008421
| 0.007719
| 0
| 0.050526
| 0.010526
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f2e79495573e62f3873db4ca7c58977203422284
| 6,802
|
py
|
Python
|
Painter_RandomizedBenchmarking/DRAG_utils.py
|
sherryue123/Labber-Drivers
|
90fc217db4158857f650122cd780f18fcf6d9a78
|
[
"MIT"
] | 3
|
2019-06-18T21:14:50.000Z
|
2020-10-11T19:05:35.000Z
|
Painter_RandomizedBenchmarking/DRAG_utils.py
|
sherryue123/Labber-Drivers
|
90fc217db4158857f650122cd780f18fcf6d9a78
|
[
"MIT"
] | null | null | null |
Painter_RandomizedBenchmarking/DRAG_utils.py
|
sherryue123/Labber-Drivers
|
90fc217db4158857f650122cd780f18fcf6d9a78
|
[
"MIT"
] | 1
|
2020-04-03T16:54:28.000Z
|
2020-04-03T16:54:28.000Z
|
""" A super cool file for helping with DRAG stuff."""
import numpy as np
import scipy
import dsp_utils
# Empty envelopes
def empty_detuning_envelope(t, args):
    """Detuning envelope that is identically zero (no detuning applied)."""
    return 0
def empety_x_envelope(t, args):
    """In-phase (X) envelope that is identically zero.

    NOTE: the name is misspelled ("empety"); it is kept for backward
    compatibility and aliased below under the intended spelling.
    """
    return 0


# Correctly spelled alias for the misspelled public name above.
empty_x_envelope = empety_x_envelope
def empty_y_envelope(t, args):
    """Out-of-phase (Y) envelope that is identically zero."""
    return 0
# Helper functions for building gaussian pulses
def gaussian(t, sigma):
    """Unnormalized Gaussian exp(-t**2 / (2*sigma**2)) evaluated at t."""
    two_variance = 2 * sigma ** 2
    return np.exp(-(t ** 2) / two_variance)
def truncated_gaussian(t, sigma, t0, tn):
    """Truncated Gaussian pulse shape.

    Gaussian of width sigma centred at t0, offset so it vanishes at the
    truncation offset tn, divided by a normalizing denominator built from
    the error function over the truncation window.
    """
    edge = gaussian(tn, sigma)
    erf_part = scipy.special.erf(tn / (np.sqrt(2) * sigma))
    numerator = gaussian(t - t0, sigma) - edge
    denominator = np.sqrt(2 * np.pi * sigma ** 2) * erf_part - 2 * tn * edge
    return numerator / denominator
def truncated_gaussian_derivative(t, sigma, t0, tn):
    """Time derivative of the truncated Gaussian (same normalization)."""
    edge = gaussian(tn, sigma)
    erf_part = scipy.special.erf(tn / (np.sqrt(2) * sigma))
    numerator = -(t - t0) / (sigma ** 2) * gaussian(t - t0, sigma)
    denominator = np.sqrt(2 * np.pi * sigma ** 2) * erf_part - 2 * tn * edge
    return numerator / denominator
# Functions for bottom two level DRAG in an anharmonic oscillator
def x_envelope_ge(t, args):
    """In-phase (X) DRAG quadrature for the bottom (g<->e) transition.

    args keys: 'A' (amplitude), 'tg' (pulse centre), 'tsigma' (width),
    'tn' (truncation offset), 'x_coeff' (quadrature scale).
    """
    tg, tsigma, tn = args['tg'], args['tsigma'], args['tn']
    two_var = 2 * tsigma ** 2
    edge = np.exp(-tn ** 2 / two_var)
    erf_part = scipy.special.erf(tn / (np.sqrt(2) * tsigma))
    numerator = np.exp(-(t - tg) ** 2 / two_var) - edge
    denominator = np.sqrt(2 * np.pi * tsigma ** 2) * erf_part - 2 * tn * edge
    return args['x_coeff'] * args['A'] * numerator / denominator
def y_envelope_ge(t, args):
    """Out-of-phase (Y) DRAG quadrature for g<->e.

    Proportional to the time derivative of the truncated-Gaussian X
    envelope, scaled by args['y_coeff'] and args['A'].
    """
    tg, tsigma, tn = args['tg'], args['tsigma'], args['tn']
    two_var = 2 * tsigma ** 2
    edge = np.exp(-tn ** 2 / two_var)
    erf_part = scipy.special.erf(tn / (np.sqrt(2) * tsigma))
    numerator = -(t - tg) / (tsigma ** 2) * np.exp(-(t - tg) ** 2 / two_var)
    denominator = np.sqrt(2 * np.pi * tsigma ** 2) * erf_part - 2 * tn * edge
    return args['y_coeff'] * args['A'] * numerator / denominator
def det_envelope_ge(t, args):
    """Detuning envelope for g<->e DRAG.

    args['det_coeff'] times the square of the amplitude-scaled
    truncated-Gaussian envelope.
    """
    tg, tsigma, tn = args['tg'], args['tsigma'], args['tn']
    two_var = 2 * tsigma ** 2
    edge = np.exp(-tn ** 2 / two_var)
    erf_part = scipy.special.erf(tn / (np.sqrt(2) * tsigma))
    pulse = np.exp(-(t - tg) ** 2 / two_var) - edge
    norm = np.sqrt(2 * np.pi * tsigma ** 2) * erf_part - 2 * tn * edge
    return args['det_coeff'] * (args['A'] * pulse / norm) ** 2
# Functions for intermediate DRAG in an anharmonic oscillator
def x_envelope_ef(t, args):
    """In-Phase Quadrature Envelope for e->f DRAG."""
    center = args['t_g'] / 2
    cutoff = args['t_n'] / 2
    return args['A'] * truncated_gaussian(t, args['sigma'], center, cutoff)
def y_envelope_ef(t, args):
    """Out-of-Phase Quadrature Envelope for e->f DRAG.

    The prefactor mixes neighbouring-level couplings and anharmonicities;
    couplings are first normalized by the drive strength args['g'].
    """
    anharms = args['anharms']
    e = args['e']
    scaled = [c / args['g'] for c in args['couplings']]
    ratio_sq = anharms[e + 2] ** 2 / anharms[e - 1] ** 2
    coeff = -np.sqrt(scaled[e - 1] ** 2 + ratio_sq * scaled[e + 1] ** 2) / (2 * anharms[e + 2])
    derivative = truncated_gaussian_derivative(t, args['sigma'], args['t_g'] / 2, args['t_n'] / 2)
    return args['A'] * coeff * derivative
def detuning_envelope_ef(t, args):
    """Detuning envelope for e->f DRAG.

    NOTE(review): unlike y_envelope_ef, couplings[e+1] enters UNSQUARED in
    the coefficient here -- preserved exactly as written; confirm against
    the DRAG derivation.
    """
    anharms = args['anharms']
    e = args['e']
    scaled = [c / args['g'] for c in args['couplings']]
    ratio_sq = anharms[e + 2] ** 2 / anharms[e - 1] ** 2
    coeff = (scaled[e - 1] ** 2 - ratio_sq * scaled[e + 1]) / (4 * anharms[e + 2])
    pulse = args['A'] * truncated_gaussian(t, args['sigma'], args['t_g'] / 2, args['t_n'] / 2)
    return coeff * pulse ** 2
def create_ge_envelopes(sample_rate,
                        gate_time,
                        envelope_args,
                        modulation_args=None,
                        quantization_args=None,
                        upsampling_args=None,
                        noise_args=None):
    """Sample the X, Y and detuning DRAG envelopes for the g<->e transition.

    All three channels are generated through dsp_utils.create_custom_signal
    with identical processing options; returns (times, xs, ys, dets).
    """
    shared = dict(envelope_args=envelope_args,
                  modulation_args=modulation_args,
                  quantization_args=quantization_args,
                  upsampling_args=upsampling_args,
                  noise_args=noise_args)
    xs, times = dsp_utils.create_custom_signal(
        x_envelope_ge, sample_rate, gate_time, **shared)
    ys, _ = dsp_utils.create_custom_signal(
        y_envelope_ge, sample_rate, gate_time, **shared)
    dets, _ = dsp_utils.create_custom_signal(
        det_envelope_ge, sample_rate, gate_time, **shared)
    return times, xs, ys, dets
def create_constant_detuning_DRAG_envelopes(sample_rate,
                                            gate_time,
                                            envelope_args,
                                            modulation_args=None,
                                            quantization_args=None,
                                            upsampling_args=None,
                                            noise_args=None):
    """Sample g<->e DRAG quadratures with a CONSTANT detuning track.

    Identical to create_ge_envelopes except that the detuning channel is the
    fixed value envelope_args['det_coeff'] rather than the squared-Gaussian
    shape; the detuning signal is generated without envelope/modulation args.
    """
    shared = dict(envelope_args=envelope_args,
                  modulation_args=modulation_args,
                  quantization_args=quantization_args,
                  upsampling_args=upsampling_args,
                  noise_args=noise_args)
    xs, times = dsp_utils.create_custom_signal(
        x_envelope_ge, sample_rate, gate_time, **shared)
    ys, _ = dsp_utils.create_custom_signal(
        y_envelope_ge, sample_rate, gate_time, **shared)

    def constant_detuning(t, args=None):
        # Flat detuning at the configured coefficient.
        return envelope_args['det_coeff']

    dets, _ = dsp_utils.create_custom_signal(
        constant_detuning,
        sample_rate,
        gate_time,
        envelope_args=None,
        modulation_args=None,
        quantization_args=quantization_args,
        upsampling_args=upsampling_args,
        noise_args=noise_args)
    return times, xs, ys, dets
| 38.213483
| 104
| 0.543664
| 811
| 6,802
| 4.348952
| 0.114673
| 0.0241
| 0.037426
| 0.027219
| 0.824213
| 0.801815
| 0.744542
| 0.731216
| 0.710802
| 0.710802
| 0
| 0.018285
| 0.32461
| 6,802
| 177
| 105
| 38.429379
| 0.749456
| 0.052632
| 0
| 0.707143
| 0
| 0
| 0.035369
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107143
| false
| 0
| 0.021429
| 0.035714
| 0.235714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f2ef79fff16604edae6b25ba5e265ecbd4b6bfb2
| 111
|
py
|
Python
|
Python/OneLang/Generator/IGeneratorPlugin.py
|
onelang/OneLang-CrossCompiled
|
ed67f2b57e7181712c7d8c2ba85f23b3812d8e3a
|
[
"MIT"
] | 2
|
2020-11-15T23:38:23.000Z
|
2020-11-16T00:54:25.000Z
|
Python/OneLang/Generator/IGeneratorPlugin.py
|
onelang/OneLang-CrossCompiled
|
ed67f2b57e7181712c7d8c2ba85f23b3812d8e3a
|
[
"MIT"
] | null | null | null |
Python/OneLang/Generator/IGeneratorPlugin.py
|
onelang/OneLang-CrossCompiled
|
ed67f2b57e7181712c7d8c2ba85f23b3812d8e3a
|
[
"MIT"
] | null | null | null |
from onelang_core import *
import OneLang.One.Ast.Interfaces as ints
import OneLang.One.Ast.Statements as stats
| 37
| 42
| 0.837838
| 18
| 111
| 5.111111
| 0.611111
| 0.282609
| 0.347826
| 0.413043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099099
| 111
| 3
| 42
| 37
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
843af5a525110e9d08f09b3afa5464e12516cc00
| 4,021
|
py
|
Python
|
neuralwoz/loss.py
|
naver-ai/neuralwoz
|
89f10904256c0df33e14c05e8581421d52b63105
|
[
"Apache-2.0"
] | 25
|
2021-07-22T07:40:24.000Z
|
2022-02-09T01:42:43.000Z
|
neuralwoz/loss.py
|
naver-ai/neuralwoz
|
89f10904256c0df33e14c05e8581421d52b63105
|
[
"Apache-2.0"
] | null | null | null |
neuralwoz/loss.py
|
naver-ai/neuralwoz
|
89f10904256c0df33e14c05e8581421d52b63105
|
[
"Apache-2.0"
] | 3
|
2021-07-23T01:14:38.000Z
|
2021-08-19T10:36:35.000Z
|
"""
NeuralWOZ
Copyright (c) 2021-present NAVER Corp.
Apache License v2.0
"""
import torch
class LabelSmoothingLoss(torch.nn.Module):
    """Label-smoothed negative log-likelihood over logits.

    With label smoothing, KL-divergence between q_{smoothed ground truth
    prob.}(w) and p_{prob. computed by model}(w) is minimized.
    """

    def __init__(self, label_smoothing, tgt_size, pad_id=1, ignore_index=-100):
        assert 0.0 < label_smoothing <= 1.0
        super(LabelSmoothingLoss, self).__init__()
        # Attribute names kept identical to the original implementation.
        self.eps = label_smoothing
        self.n_class = tgt_size
        self.ignore_index = ignore_index
        self.pad_id = pad_id

    def forward(self, output, target):
        """
        output (FloatTensor): batch_size x n_classes
        target (LongTensor): batch_size
        """
        valid = target.ne(self.ignore_index)
        # Ignored positions are temporarily remapped to pad_id so scatter
        # receives an in-range index; they are masked out of the loss below.
        safe_target = target.masked_fill(target.eq(self.ignore_index), self.pad_id)
        smoothed = torch.zeros_like(output).scatter(1, safe_target.view(-1, 1), 1)
        smoothed = smoothed * (1 - self.eps) + (1 - smoothed) * self.eps / (self.n_class - 1)
        smoothed = smoothed.to(target.device)
        log_probs = torch.nn.functional.log_softmax(output, dim=1)
        per_example = -(smoothed * log_probs).sum(dim=1)
        per_example = per_example * valid.float()
        return per_example.mean()
class LabelSmoothingWithoutSoftmaxLoss(torch.nn.Module):
    """Label-smoothed negative log-likelihood for pre-normalized inputs.

    Identical to ``LabelSmoothingLoss`` except that ``output`` is
    assumed to already contain probabilities, so ``torch.log`` is
    applied directly instead of ``log_softmax``. Positions whose target
    equals ``ignore_index`` contribute zero to the loss.
    """

    def __init__(self, label_smoothing, tgt_size, pad_id=1, ignore_index=-100):
        assert 0.0 < label_smoothing <= 1.0
        super().__init__()
        self.eps = label_smoothing
        self.n_class = tgt_size
        self.ignore_index = ignore_index
        self.pad_id = pad_id

    def forward(self, output, target):
        """
        output (FloatTensor): batch_size x n_classes
        target (LongTensor): batch_size
        """
        valid = target.ne(self.ignore_index).float()
        # Remap ignored positions to pad_id so scatter gets a legal index;
        # their contribution is masked out afterwards.
        safe_target = target.masked_fill(target.eq(self.ignore_index), self.pad_id)
        smoothed = torch.zeros_like(output).scatter(1, safe_target.view(-1, 1), 1)
        smoothed = smoothed * (1 - self.eps) + (1 - smoothed) * self.eps / (self.n_class - 1)
        smoothed = smoothed.to(target.device)
        # output is expected to be probabilities already — no softmax here.
        log_probs = torch.log(output)
        per_example = -(smoothed * log_probs).sum(dim=1)
        per_example = per_example * valid
        return per_example.mean()
class CrossEntropyWithoutSoftmaxLoss(torch.nn.Module):
    """Negative log-likelihood over inputs that are already probabilities.

    Equivalent to cross-entropy but without applying softmax:
    ``torch.log`` is taken directly on ``output``. Positions whose
    target equals ``ignore_index`` contribute zero to the loss.
    """

    def __init__(self, tgt_size, pad_id=1, ignore_index=-100):
        super().__init__()
        self.n_class = tgt_size
        self.ignore_index = ignore_index
        self.pad_id = pad_id

    def forward(self, output, target):
        """
        output (FloatTensor): batch_size x n_classes
        target (LongTensor): batch_size
        """
        valid = target.ne(self.ignore_index).float()
        # Remap ignored positions to pad_id so scatter gets a legal index;
        # their contribution is masked out afterwards.
        safe_target = target.masked_fill(target.eq(self.ignore_index), self.pad_id)
        gold = torch.zeros_like(output).scatter(1, safe_target.view(-1, 1), 1)
        gold = gold.to(target.device)
        # output is expected to be probabilities already — no softmax here.
        log_probs = torch.log(output)
        per_example = -(gold * log_probs).sum(dim=1)
        per_example = per_example * valid
        return per_example.mean()
class WeightedChoiceLoss(torch.nn.Module):
    """Cross-entropy where selected examples are re-weighted by ``beta``.

    Each example's NLL is multiplied by ``beta`` when its ``weight`` is
    1 and left unscaled when its ``weight`` is 0 (weights are expected
    to be 0. or 1.).
    """

    def __init__(self, beta):
        super().__init__()
        self.beta = beta

    def forward(self, output, target, weight):
        """
        output (FloatTensor): batch_size x n_classes
        target (LongTensor): batch_size
        weight (FloatTensor): batch_size (0. or 1.)
        """
        gold = torch.zeros_like(output).scatter(1, target.view(-1, 1), 1)
        gold = gold.to(target.device)
        log_probs = torch.nn.functional.log_softmax(output, dim=1)
        nll = -(gold * log_probs).sum(1)
        # weight == 1 -> beta * nll; weight == 0 -> plain nll.
        weighted = self.beta * (nll * weight) + (1 - weight) * nll
        return weighted.mean()
| 35.901786
| 90
| 0.623725
| 533
| 4,021
| 4.446529
| 0.165103
| 0.055696
| 0.056962
| 0.04557
| 0.834177
| 0.823207
| 0.802954
| 0.802954
| 0.791561
| 0.791561
| 0
| 0.019025
| 0.254912
| 4,021
| 111
| 91
| 36.225225
| 0.772029
| 0.168117
| 0
| 0.746032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031746
| 1
| 0.126984
| false
| 0
| 0.015873
| 0
| 0.269841
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ffd60fe1921da0cd5f09233b30ce0f7ecaf64529
| 126,011
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_tty_server_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_tty_server_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_tty_server_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" Cisco_IOS_XR_tty_server_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR tty\-server package operational data.
This module contains definitions
for the following management objects\:
tty\: TTY Line Configuration
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class LineStateEnum(Enum):
    """
    LineStateEnum

    Line state

    .. data:: none = 0

        Line not connected

    .. data:: registered = 1

        Line registered

    .. data:: in_use = 2

        Line active and in use

    """

    none = 0

    registered = 1

    in_use = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta tables are large, so they are
        # only loaded when the YDK runtime actually requests them.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
        return meta._meta_table['LineStateEnum']
class SessionOperationEnum(Enum):
    """
    SessionOperationEnum

    Session operation

    .. data:: none = 0

        No sessions on the line

    .. data:: setup = 1

        Session getting set up

    .. data:: shell = 2

        Session active with a shell

    .. data:: transitioning = 3

        Session in transitioning phase

    .. data:: packet = 4

        Session ready to receive packets

    """

    none = 0

    setup = 1

    shell = 2

    transitioning = 3

    packet = 4

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta tables are large, so they are
        # only loaded when the YDK runtime actually requests them.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
        return meta._meta_table['SessionOperationEnum']
class Tty(object):
"""
TTY Line Configuration
.. attribute:: auxiliary_nodes
List of Nodes attached with an auxiliary line
**type**\: :py:class:`AuxiliaryNodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes>`
.. attribute:: console_nodes
List of Nodes for console
**type**\: :py:class:`ConsoleNodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes>`
.. attribute:: vty_lines
List of VTY lines
**type**\: :py:class:`VtyLines <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.auxiliary_nodes = Tty.AuxiliaryNodes()
self.auxiliary_nodes.parent = self
self.console_nodes = Tty.ConsoleNodes()
self.console_nodes.parent = self
self.vty_lines = Tty.VtyLines()
self.vty_lines.parent = self
class ConsoleNodes(object):
"""
List of Nodes for console
.. attribute:: console_node
Console line configuration on a node
**type**\: list of :py:class:`ConsoleNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.console_node = YList()
self.console_node.parent = self
self.console_node.name = 'console_node'
class ConsoleNode(object):
"""
Console line configuration on a node
.. attribute:: id <key>
Node ID
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
.. attribute:: console_line
Console line
**type**\: :py:class:`ConsoleLine <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.id = None
self.console_line = Tty.ConsoleNodes.ConsoleNode.ConsoleLine()
self.console_line.parent = self
class ConsoleLine(object):
"""
Console line
.. attribute:: configuration
Configuration information of the line
**type**\: :py:class:`Configuration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration>`
.. attribute:: console_statistics
Statistics of the console line
**type**\: :py:class:`ConsoleStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics>`
.. attribute:: state
Line state information
**type**\: :py:class:`State <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.configuration = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration()
self.configuration.parent = self
self.console_statistics = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics()
self.console_statistics.parent = self
self.state = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State()
self.state.parent = self
class ConsoleStatistics(object):
"""
Statistics of the console line
.. attribute:: aaa
AAA related statistics
**type**\: :py:class:`Aaa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Aaa>`
.. attribute:: exec_
Exec related statistics
**type**\: :py:class:`Exec_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Exec_>`
.. attribute:: general_statistics
General statistics of line
**type**\: :py:class:`GeneralStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.GeneralStatistics>`
.. attribute:: rs232
RS232 statistics of console line
**type**\: :py:class:`Rs232 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Rs232>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.aaa = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Aaa()
self.aaa.parent = self
self.exec_ = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Exec_()
self.exec_.parent = self
self.general_statistics = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.GeneralStatistics()
self.general_statistics.parent = self
self.rs232 = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Rs232()
self.rs232.parent = self
class Rs232(object):
"""
RS232 statistics of console line
.. attribute:: baud_rate
Inbound/Outbound baud rate in bps
**type**\: int
**range:** 0..4294967295
**units**\: bit/s
.. attribute:: data_bits
Number of databits
**type**\: int
**range:** 0..4294967295
**units**\: bit
.. attribute:: exec_disabled
Exec disabled on TTY
**type**\: bool
.. attribute:: framing_error_count
Framing error count
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_flow_control_status
Hardware flow control status
**type**\: int
**range:** 0..4294967295
.. attribute:: overrun_error_count
Overrun error count
**type**\: int
**range:** 0..4294967295
.. attribute:: parity_error_count
Parity error count
**type**\: int
**range:** 0..4294967295
.. attribute:: parity_status
Parity status
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_bits
Number of stopbits
**type**\: int
**range:** 0..4294967295
**units**\: bit
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.baud_rate = None
self.data_bits = None
self.exec_disabled = None
self.framing_error_count = None
self.hardware_flow_control_status = None
self.overrun_error_count = None
self.parity_error_count = None
self.parity_status = None
self.stop_bits = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:rs232'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.baud_rate is not None:
return True
if self.data_bits is not None:
return True
if self.exec_disabled is not None:
return True
if self.framing_error_count is not None:
return True
if self.hardware_flow_control_status is not None:
return True
if self.overrun_error_count is not None:
return True
if self.parity_error_count is not None:
return True
if self.parity_status is not None:
return True
if self.stop_bits is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Rs232']['meta_info']
class GeneralStatistics(object):
"""
General statistics of line
.. attribute:: absolute_timeout
Absolute timeout period
**type**\: int
**range:** 0..4294967295
.. attribute:: async_interface
Usable as async interface
**type**\: bool
.. attribute:: domain_lookup_enabled
DNS resolution enabled
**type**\: bool
.. attribute:: flow_control_start_character
Software flow control start char
**type**\: int
**range:** \-128..127
.. attribute:: flow_control_stop_character
Software flow control stop char
**type**\: int
**range:** \-128..127
.. attribute:: idle_time
TTY idle time
**type**\: int
**range:** 0..4294967295
.. attribute:: motd_banner_enabled
MOTD banner enabled
**type**\: bool
.. attribute:: private_flag
TTY private flag
**type**\: bool
.. attribute:: terminal_length
Terminal length
**type**\: int
**range:** 0..4294967295
.. attribute:: terminal_type
Terminal type
**type**\: str
.. attribute:: terminal_width
Line width
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.absolute_timeout = None
self.async_interface = None
self.domain_lookup_enabled = None
self.flow_control_start_character = None
self.flow_control_stop_character = None
self.idle_time = None
self.motd_banner_enabled = None
self.private_flag = None
self.terminal_length = None
self.terminal_type = None
self.terminal_width = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:general-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.absolute_timeout is not None:
return True
if self.async_interface is not None:
return True
if self.domain_lookup_enabled is not None:
return True
if self.flow_control_start_character is not None:
return True
if self.flow_control_stop_character is not None:
return True
if self.idle_time is not None:
return True
if self.motd_banner_enabled is not None:
return True
if self.private_flag is not None:
return True
if self.terminal_length is not None:
return True
if self.terminal_type is not None:
return True
if self.terminal_width is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.GeneralStatistics']['meta_info']
class Exec_(object):
"""
Exec related statistics
.. attribute:: time_stamp_enabled
Specifies whether timestamp is enabled or not
**type**\: bool
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.time_stamp_enabled = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:exec'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.time_stamp_enabled is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Exec_']['meta_info']
class Aaa(object):
"""
AAA related statistics
.. attribute:: user_name
The authenticated username
**type**\: str
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.user_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:aaa'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.user_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Aaa']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:console-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.aaa is not None and self.aaa._has_data():
return True
if self.exec_ is not None and self.exec_._has_data():
return True
if self.general_statistics is not None and self.general_statistics._has_data():
return True
if self.rs232 is not None and self.rs232._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics']['meta_info']
class State(object):
"""
Line state information
.. attribute:: general
General information
**type**\: :py:class:`General <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State.General>`
.. attribute:: template
Information related to template applied to the line
**type**\: :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State.Template>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.general = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State.General()
self.general.parent = self
self.template = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State.Template()
self.template.parent = self
class Template(object):
"""
Information related to template applied to the
line
.. attribute:: name
Name of the template
**type**\: str
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:template'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State.Template']['meta_info']
class General(object):
"""
General information
.. attribute:: general_state
State of the line
**type**\: :py:class:`LineStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.LineStateEnum>`
.. attribute:: operation
application running of on the tty line
**type**\: :py:class:`SessionOperationEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.SessionOperationEnum>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.general_state = None
self.operation = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:general'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.general_state is not None:
return True
if self.operation is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State.General']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.general is not None and self.general._has_data():
return True
if self.template is not None and self.template._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State']['meta_info']
class Configuration(object):
"""
Configuration information of the line
.. attribute:: connection_configuration
Conection configuration information
**type**\: :py:class:`ConnectionConfiguration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration.ConnectionConfiguration>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.connection_configuration = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration.ConnectionConfiguration()
self.connection_configuration.parent = self
class ConnectionConfiguration(object):
"""
Conection configuration information
.. attribute:: acl_in
ACL for inbound traffic
**type**\: str
.. attribute:: acl_out
ACL for outbound traffic
**type**\: str
.. attribute:: transport_input
Protocols to use when connecting to the terminal server
**type**\: :py:class:`TransportInput <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration.ConnectionConfiguration.TransportInput>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.acl_in = None
self.acl_out = None
self.transport_input = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration.ConnectionConfiguration.TransportInput()
self.transport_input.parent = self
class TransportInput(object):
"""
Protocols to use when connecting to the
terminal server
.. attribute:: none
Not used
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: protocol1
Transport protocol1
**type**\: :py:class:`TtyTransportProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolEnum>`
.. attribute:: protocol2
Transport protocol2
**type**\: :py:class:`TtyTransportProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolEnum>`
.. attribute:: select
Choose transport protocols
**type**\: :py:class:`TtyTransportProtocolSelectEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolSelectEnum>`
**default value**\: all
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.none = None
self.protocol1 = None
self.protocol2 = None
self.select = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:transport-input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.none is not None:
return True
if self.protocol1 is not None:
return True
if self.protocol2 is not None:
return True
if self.select is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration.ConnectionConfiguration.TransportInput']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:connection-configuration'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.acl_in is not None:
return True
if self.acl_out is not None:
return True
if self.transport_input is not None and self.transport_input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration.ConnectionConfiguration']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:configuration'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.connection_configuration is not None and self.connection_configuration._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:console-line'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.configuration is not None and self.configuration._has_data():
return True
if self.console_statistics is not None and self.console_statistics._has_data():
return True
if self.state is not None and self.state._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine']['meta_info']
@property
def _common_path(self):
if self.id is None:
raise YPYModelError('Key property id is None')
return '/Cisco-IOS-XR-tty-server-oper:tty/Cisco-IOS-XR-tty-server-oper:console-nodes/Cisco-IOS-XR-tty-server-oper:console-node[Cisco-IOS-XR-tty-server-oper:id = ' + str(self.id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.id is not None:
return True
if self.console_line is not None and self.console_line._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tty-server-oper:tty/Cisco-IOS-XR-tty-server-oper:console-nodes'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.console_node is not None:
for child_ref in self.console_node:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes']['meta_info']
class VtyLines(object):
"""
List of VTY lines
.. attribute:: vty_line
VTY Line
**type**\: list of :py:class:`VtyLine <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.vty_line = YList()
self.vty_line.parent = self
self.vty_line.name = 'vty_line'
class VtyLine(object):
"""
VTY Line
.. attribute:: line_number <key>
VTY Line number
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: configuration
Configuration information of the line
**type**\: :py:class:`Configuration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.Configuration>`
.. attribute:: sessions
Outgoing sessions
**type**\: :py:class:`Sessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.Sessions>`
.. attribute:: state
Line state information
**type**\: :py:class:`State <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.State>`
.. attribute:: vty_statistics
Statistics of the VTY line
**type**\: :py:class:`VtyStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.VtyStatistics>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.line_number = None
self.configuration = Tty.VtyLines.VtyLine.Configuration()
self.configuration.parent = self
self.sessions = Tty.VtyLines.VtyLine.Sessions()
self.sessions.parent = self
self.state = Tty.VtyLines.VtyLine.State()
self.state.parent = self
self.vty_statistics = Tty.VtyLines.VtyLine.VtyStatistics()
self.vty_statistics.parent = self
class VtyStatistics(object):
"""
Statistics of the VTY line
.. attribute:: aaa
AAA related statistics
**type**\: :py:class:`Aaa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.VtyStatistics.Aaa>`
.. attribute:: connection
Connection related statistics
**type**\: :py:class:`Connection <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.VtyStatistics.Connection>`
.. attribute:: exec_
Exec related statistics
**type**\: :py:class:`Exec_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.VtyStatistics.Exec_>`
.. attribute:: general_statistics
General statistics of line
**type**\: :py:class:`GeneralStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.VtyStatistics.GeneralStatistics>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.aaa = Tty.VtyLines.VtyLine.VtyStatistics.Aaa()
self.aaa.parent = self
self.connection = Tty.VtyLines.VtyLine.VtyStatistics.Connection()
self.connection.parent = self
self.exec_ = Tty.VtyLines.VtyLine.VtyStatistics.Exec_()
self.exec_.parent = self
self.general_statistics = Tty.VtyLines.VtyLine.VtyStatistics.GeneralStatistics()
self.general_statistics.parent = self
class Connection(object):
"""
Connection related statistics
.. attribute:: host_address_family
Incoming host address family
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_host_address
Incoming host address(max)
**type**\: str
**length:** 0..46
.. attribute:: service
Input transport
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.host_address_family = None
self.incoming_host_address = None
self.service = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:connection'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.host_address_family is not None:
return True
if self.incoming_host_address is not None:
return True
if self.service is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.VtyStatistics.Connection']['meta_info']
class GeneralStatistics(object):
"""
General statistics of line
.. attribute:: absolute_timeout
Absolute timeout period
**type**\: int
**range:** 0..4294967295
.. attribute:: async_interface
Usable as async interface
**type**\: bool
.. attribute:: domain_lookup_enabled
DNS resolution enabled
**type**\: bool
.. attribute:: flow_control_start_character
Software flow control start char
**type**\: int
**range:** \-128..127
.. attribute:: flow_control_stop_character
Software flow control stop char
**type**\: int
**range:** \-128..127
.. attribute:: idle_time
TTY idle time
**type**\: int
**range:** 0..4294967295
.. attribute:: motd_banner_enabled
MOTD banner enabled
**type**\: bool
.. attribute:: private_flag
TTY private flag
**type**\: bool
.. attribute:: terminal_length
Terminal length
**type**\: int
**range:** 0..4294967295
.. attribute:: terminal_type
Terminal type
**type**\: str
.. attribute:: terminal_width
Line width
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.absolute_timeout = None
self.async_interface = None
self.domain_lookup_enabled = None
self.flow_control_start_character = None
self.flow_control_stop_character = None
self.idle_time = None
self.motd_banner_enabled = None
self.private_flag = None
self.terminal_length = None
self.terminal_type = None
self.terminal_width = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:general-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.absolute_timeout is not None:
return True
if self.async_interface is not None:
return True
if self.domain_lookup_enabled is not None:
return True
if self.flow_control_start_character is not None:
return True
if self.flow_control_stop_character is not None:
return True
if self.idle_time is not None:
return True
if self.motd_banner_enabled is not None:
return True
if self.private_flag is not None:
return True
if self.terminal_length is not None:
return True
if self.terminal_type is not None:
return True
if self.terminal_width is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.VtyStatistics.GeneralStatistics']['meta_info']
class Exec_(object):
"""
Exec related statistics
.. attribute:: time_stamp_enabled
Specifies whether timestamp is enabled or not
**type**\: bool
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.time_stamp_enabled = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:exec'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.time_stamp_enabled is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.VtyStatistics.Exec_']['meta_info']
class Aaa(object):
"""
AAA related statistics
.. attribute:: user_name
The authenticated username
**type**\: str
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.user_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:aaa'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.user_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.VtyStatistics.Aaa']['meta_info']
@property
def _common_path(self):
    # vty-statistics container path, anchored under the parent vty-line.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:vty-statistics'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Operational (read-only) model node, so never config data.
    return False
def _has_data(self):
    """Return True when any child statistics container carries data."""
    if not self.is_config():
        return False
    children = (self.aaa, self.connection, self.exec_, self.general_statistics)
    return any(child is not None and child._has_data() for child in children)
@staticmethod
def _meta_info():
    # Deferred import avoids a circular dependency with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
    return meta._meta_table['Tty.VtyLines.VtyLine.VtyStatistics']['meta_info']
class State(object):
"""
Line state information
.. attribute:: general
General information
**type**\: :py:class:`General <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.State.General>`
.. attribute:: template
Information related to template applied to the line
**type**\: :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.State.Template>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.general = Tty.VtyLines.VtyLine.State.General()
self.general.parent = self
self.template = Tty.VtyLines.VtyLine.State.Template()
self.template.parent = self
class Template(object):
"""
Information related to template applied to the
line
.. attribute:: name
Name of the template
**type**\: str
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:template'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.State.Template']['meta_info']
class General(object):
"""
General information
.. attribute:: general_state
State of the line
**type**\: :py:class:`LineStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.LineStateEnum>`
.. attribute:: operation
application running of on the tty line
**type**\: :py:class:`SessionOperationEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.SessionOperationEnum>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.general_state = None
self.operation = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:general'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.general_state is not None:
return True
if self.operation is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.State.General']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.general is not None and self.general._has_data():
return True
if self.template is not None and self.template._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.State']['meta_info']
class Configuration(object):
"""
Configuration information of the line
.. attribute:: connection_configuration
Conection configuration information
**type**\: :py:class:`ConnectionConfiguration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.Configuration.ConnectionConfiguration>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.connection_configuration = Tty.VtyLines.VtyLine.Configuration.ConnectionConfiguration()
self.connection_configuration.parent = self
class ConnectionConfiguration(object):
"""
Conection configuration information
.. attribute:: acl_in
ACL for inbound traffic
**type**\: str
.. attribute:: acl_out
ACL for outbound traffic
**type**\: str
.. attribute:: transport_input
Protocols to use when connecting to the terminal server
**type**\: :py:class:`TransportInput <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.Configuration.ConnectionConfiguration.TransportInput>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.acl_in = None
self.acl_out = None
self.transport_input = Tty.VtyLines.VtyLine.Configuration.ConnectionConfiguration.TransportInput()
self.transport_input.parent = self
class TransportInput(object):
"""
Protocols to use when connecting to the
terminal server
.. attribute:: none
Not used
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: protocol1
Transport protocol1
**type**\: :py:class:`TtyTransportProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolEnum>`
.. attribute:: protocol2
Transport protocol2
**type**\: :py:class:`TtyTransportProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolEnum>`
.. attribute:: select
Choose transport protocols
**type**\: :py:class:`TtyTransportProtocolSelectEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolSelectEnum>`
**default value**\: all
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.none = None
self.protocol1 = None
self.protocol2 = None
self.select = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:transport-input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.none is not None:
return True
if self.protocol1 is not None:
return True
if self.protocol2 is not None:
return True
if self.select is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.Configuration.ConnectionConfiguration.TransportInput']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:connection-configuration'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.acl_in is not None:
return True
if self.acl_out is not None:
return True
if self.transport_input is not None and self.transport_input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.Configuration.ConnectionConfiguration']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:configuration'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.connection_configuration is not None and self.connection_configuration._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.Configuration']['meta_info']
class Sessions(object):
"""
Outgoing sessions
.. attribute:: outgoing_connection
List of outgoing sessions
**type**\: list of :py:class:`OutgoingConnection <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.Sessions.OutgoingConnection>`
"""
_prefix = 'tty-management-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.outgoing_connection = YList()
self.outgoing_connection.parent = self
self.outgoing_connection.name = 'outgoing_connection'
class OutgoingConnection(object):
"""
List of outgoing sessions
.. attribute:: connection_id
Connection ID [1\-20]
**type**\: int
**range:** 0..255
.. attribute:: host_address
Host address
**type**\: :py:class:`HostAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.Sessions.OutgoingConnection.HostAddress>`
.. attribute:: host_name
Host name
**type**\: str
.. attribute:: idle_time
Elapsed time since session was suspended (in seconds)
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: is_last_active_session
True indicates last active session
**type**\: bool
.. attribute:: transport_protocol
Session transport protocol
**type**\: :py:class:`TransportServiceEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_oper.TransportServiceEnum>`
"""
_prefix = 'tty-management-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.connection_id = None
self.host_address = Tty.VtyLines.VtyLine.Sessions.OutgoingConnection.HostAddress()
self.host_address.parent = self
self.host_name = None
self.idle_time = None
self.is_last_active_session = None
self.transport_protocol = None
class HostAddress(object):
"""
Host address
.. attribute:: af_name
AFName
**type**\: :py:class:`HostAfIdBaseIdentity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_oper.HostAfIdBaseIdentity>`
.. attribute:: ipv4_address
IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6_address
IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'tty-management-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.af_name = None
self.ipv4_address = None
self.ipv6_address = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-management-oper:host-address'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.af_name is not None:
return True
if self.ipv4_address is not None:
return True
if self.ipv6_address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.Sessions.OutgoingConnection.HostAddress']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-management-oper:outgoing-connection'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.connection_id is not None:
return True
if self.host_address is not None and self.host_address._has_data():
return True
if self.host_name is not None:
return True
if self.idle_time is not None:
return True
if self.is_last_active_session is not None:
return True
if self.transport_protocol is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.Sessions.OutgoingConnection']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-management-oper:sessions'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.outgoing_connection is not None:
for child_ref in self.outgoing_connection:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.Sessions']['meta_info']
@property
def _common_path(self):
    # Absolute path for a vty-line entry, keyed by line-number; the key
    # must be set before the path can be derived.
    if self.line_number is None:
        raise YPYModelError('Key property line_number is None')
    return '/Cisco-IOS-XR-tty-server-oper:tty/Cisco-IOS-XR-tty-server-oper:vty-lines/Cisco-IOS-XR-tty-server-oper:vty-line[Cisco-IOS-XR-tty-server-oper:line-number = ' + str(self.line_number) + ']'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Operational (read-only) model node, so never config data.
    return False
def _has_data(self):
    """Return True when the key leaf or any child container carries data."""
    if not self.is_config():
        return False
    if self.line_number is not None:
        return True
    children = (self.configuration, self.sessions, self.state, self.vty_statistics)
    return any(child is not None and child._has_data() for child in children)
@staticmethod
def _meta_info():
    # Deferred import avoids a circular dependency with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
    return meta._meta_table['Tty.VtyLines.VtyLine']['meta_info']
@property
def _common_path(self):
    # Top-level container: the path is absolute, so no parent is needed.
    return '/Cisco-IOS-XR-tty-server-oper:tty/Cisco-IOS-XR-tty-server-oper:vty-lines'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Operational (read-only) model node, so never config data.
    return False
def _has_data(self):
    """Return True when any vty-line list entry carries data."""
    if not self.is_config():
        return False
    if self.vty_line is not None:
        return any(entry._has_data() for entry in self.vty_line)
    return False
@staticmethod
def _meta_info():
    # Deferred import avoids a circular dependency with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
    return meta._meta_table['Tty.VtyLines']['meta_info']
class AuxiliaryNodes(object):
"""
List of Nodes attached with an auxiliary line
.. attribute:: auxiliary_node
Line configuration on a node
**type**\: list of :py:class:`AuxiliaryNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
    """Create the keyed YANG list holding per-node auxiliary-line entries."""
    self.parent = None
    node_list = YList()
    node_list.parent = self
    node_list.name = 'auxiliary_node'
    self.auxiliary_node = node_list
class AuxiliaryNode(object):
"""
Line configuration on a node
.. attribute:: id <key>
Node ID
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
.. attribute:: auxiliary_line
Auxiliary line
**type**\: :py:class:`AuxiliaryLine <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
    """Initialise the node key and the auxiliary-line child container."""
    self.parent = None
    self.id = None
    aux_line = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine()
    aux_line.parent = self
    self.auxiliary_line = aux_line
class AuxiliaryLine(object):
"""
Auxiliary line
.. attribute:: auxiliary_statistics
Statistics of the auxiliary line
**type**\: :py:class:`AuxiliaryStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics>`
.. attribute:: configuration
Configuration information of the line
**type**\: :py:class:`Configuration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration>`
.. attribute:: state
Line state information
**type**\: :py:class:`State <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
    """Build the statistics/configuration/state children and wire parents."""
    self.parent = None
    line_cls = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine
    stats = line_cls.AuxiliaryStatistics()
    stats.parent = self
    self.auxiliary_statistics = stats
    configuration = line_cls.Configuration()
    configuration.parent = self
    self.configuration = configuration
    state = line_cls.State()
    state.parent = self
    self.state = state
class AuxiliaryStatistics(object):
"""
Statistics of the auxiliary line
.. attribute:: aaa
AAA related statistics
**type**\: :py:class:`Aaa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Aaa>`
.. attribute:: exec_
Exec related statistics
**type**\: :py:class:`Exec_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Exec_>`
.. attribute:: general_statistics
General statistics of line
**type**\: :py:class:`GeneralStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.GeneralStatistics>`
.. attribute:: rs232
RS232 statistics of console line
**type**\: :py:class:`Rs232 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Rs232>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.aaa = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Aaa()
self.aaa.parent = self
self.exec_ = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Exec_()
self.exec_.parent = self
self.general_statistics = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.GeneralStatistics()
self.general_statistics.parent = self
self.rs232 = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Rs232()
self.rs232.parent = self
class Rs232(object):
"""
RS232 statistics of console line
.. attribute:: baud_rate
Inbound/Outbound baud rate in bps
**type**\: int
**range:** 0..4294967295
**units**\: bit/s
.. attribute:: data_bits
Number of databits
**type**\: int
**range:** 0..4294967295
**units**\: bit
.. attribute:: exec_disabled
Exec disabled on TTY
**type**\: bool
.. attribute:: framing_error_count
Framing error count
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_flow_control_status
Hardware flow control status
**type**\: int
**range:** 0..4294967295
.. attribute:: overrun_error_count
Overrun error count
**type**\: int
**range:** 0..4294967295
.. attribute:: parity_error_count
Parity error count
**type**\: int
**range:** 0..4294967295
.. attribute:: parity_status
Parity status
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_bits
Number of stopbits
**type**\: int
**range:** 0..4294967295
**units**\: bit
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.baud_rate = None
self.data_bits = None
self.exec_disabled = None
self.framing_error_count = None
self.hardware_flow_control_status = None
self.overrun_error_count = None
self.parity_error_count = None
self.parity_status = None
self.stop_bits = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:rs232'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.baud_rate is not None:
return True
if self.data_bits is not None:
return True
if self.exec_disabled is not None:
return True
if self.framing_error_count is not None:
return True
if self.hardware_flow_control_status is not None:
return True
if self.overrun_error_count is not None:
return True
if self.parity_error_count is not None:
return True
if self.parity_status is not None:
return True
if self.stop_bits is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Rs232']['meta_info']
class GeneralStatistics(object):
"""
General statistics of line
.. attribute:: absolute_timeout
Absolute timeout period
**type**\: int
**range:** 0..4294967295
.. attribute:: async_interface
Usable as async interface
**type**\: bool
.. attribute:: domain_lookup_enabled
DNS resolution enabled
**type**\: bool
.. attribute:: flow_control_start_character
Software flow control start char
**type**\: int
**range:** \-128..127
.. attribute:: flow_control_stop_character
Software flow control stop char
**type**\: int
**range:** \-128..127
.. attribute:: idle_time
TTY idle time
**type**\: int
**range:** 0..4294967295
.. attribute:: motd_banner_enabled
MOTD banner enabled
**type**\: bool
.. attribute:: private_flag
TTY private flag
**type**\: bool
.. attribute:: terminal_length
Terminal length
**type**\: int
**range:** 0..4294967295
.. attribute:: terminal_type
Terminal type
**type**\: str
.. attribute:: terminal_width
Line width
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.absolute_timeout = None
self.async_interface = None
self.domain_lookup_enabled = None
self.flow_control_start_character = None
self.flow_control_stop_character = None
self.idle_time = None
self.motd_banner_enabled = None
self.private_flag = None
self.terminal_length = None
self.terminal_type = None
self.terminal_width = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:general-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.absolute_timeout is not None:
return True
if self.async_interface is not None:
return True
if self.domain_lookup_enabled is not None:
return True
if self.flow_control_start_character is not None:
return True
if self.flow_control_stop_character is not None:
return True
if self.idle_time is not None:
return True
if self.motd_banner_enabled is not None:
return True
if self.private_flag is not None:
return True
if self.terminal_length is not None:
return True
if self.terminal_type is not None:
return True
if self.terminal_width is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.GeneralStatistics']['meta_info']
class Exec_(object):
"""
Exec related statistics
.. attribute:: time_stamp_enabled
Specifies whether timestamp is enabled or not
**type**\: bool
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.time_stamp_enabled = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:exec'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.time_stamp_enabled is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Exec_']['meta_info']
class Aaa(object):
"""
AAA related statistics
.. attribute:: user_name
The authenticated username
**type**\: str
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.user_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:aaa'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.user_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Aaa']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:auxiliary-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.aaa is not None and self.aaa._has_data():
return True
if self.exec_ is not None and self.exec_._has_data():
return True
if self.general_statistics is not None and self.general_statistics._has_data():
return True
if self.rs232 is not None and self.rs232._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics']['meta_info']
class State(object):
"""
Line state information
.. attribute:: general
General information
**type**\: :py:class:`General <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State.General>`
.. attribute:: template
Information related to template applied to the line
**type**\: :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State.Template>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
    """Build the general/template child containers and wire parents."""
    self.parent = None
    state_cls = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State
    general = state_cls.General()
    general.parent = self
    self.general = general
    template = state_cls.Template()
    template.parent = self
    self.template = template
class Template(object):
"""
Information related to template applied to the
line
.. attribute:: name
Name of the template
**type**\: str
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:template'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State.Template']['meta_info']
class General(object):
"""
General information
.. attribute:: general_state
State of the line
**type**\: :py:class:`LineStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.LineStateEnum>`
.. attribute:: operation
application running of on the tty line
**type**\: :py:class:`SessionOperationEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.SessionOperationEnum>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.general_state = None
self.operation = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:general'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.general_state is not None:
return True
if self.operation is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State.General']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.general is not None and self.general._has_data():
return True
if self.template is not None and self.template._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State']['meta_info']
class Configuration(object):
    """Configuration information of the line.

    .. attribute:: connection_configuration
        Conection configuration information
        **type**\: :py:class:`ConnectionConfiguration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration.ConnectionConfiguration>`
    """

    _prefix = 'tty-server-oper'
    _revision = '2015-07-30'

    def __init__(self):
        self.parent = None
        # Single child container, eagerly built and re-parented.
        self.connection_configuration = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration.ConnectionConfiguration()
        self.connection_configuration.parent = self

    class ConnectionConfiguration(object):
        """Conection configuration information.

        .. attribute:: acl_in
            ACL for inbound traffic
            **type**\: str
        .. attribute:: acl_out
            ACL for outbound traffic
            **type**\: str
        .. attribute:: transport_input
            Protocols to use when connecting to the terminal server
            **type**\: :py:class:`TransportInput <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration.ConnectionConfiguration.TransportInput>`
        """

        _prefix = 'tty-server-oper'
        _revision = '2015-07-30'

        def __init__(self):
            self.parent = None
            self.acl_in = None
            self.acl_out = None
            self.transport_input = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration.ConnectionConfiguration.TransportInput()
            self.transport_input.parent = self

        class TransportInput(object):
            """Protocols to use when connecting to the terminal server.

            .. attribute:: none
                Not used
                **type**\: int
                **range:** \-2147483648..2147483647
            .. attribute:: protocol1
                Transport protocol1
                **type**\: :py:class:`TtyTransportProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolEnum>`
            .. attribute:: protocol2
                Transport protocol2
                **type**\: :py:class:`TtyTransportProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolEnum>`
            .. attribute:: select
                Choose transport protocols
                **type**\: :py:class:`TtyTransportProtocolSelectEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolSelectEnum>`
                **default value**\: all
            """

            _prefix = 'tty-server-oper'
            _revision = '2015-07-30'

            def __init__(self):
                self.parent = None
                self.none = None
                self.protocol1 = None
                self.protocol2 = None
                self.select = None

            @property
            def _common_path(self):
                owner = self.parent
                if owner is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return owner._common_path + '/Cisco-IOS-XR-tty-server-oper:transport-input'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                if not self.is_config():
                    return False
                return any(leaf is not None for leaf in
                           (self.none, self.protocol1, self.protocol2, self.select))

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
                return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration.ConnectionConfiguration.TransportInput']['meta_info']

        @property
        def _common_path(self):
            owner = self.parent
            if owner is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return owner._common_path + '/Cisco-IOS-XR-tty-server-oper:connection-configuration'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            if not self.is_config():
                return False
            # Leaves first, then the child container, as generated.
            if self.acl_in is not None or self.acl_out is not None:
                return True
            return self.transport_input is not None and self.transport_input._has_data()

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
            return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration.ConnectionConfiguration']['meta_info']

    @property
    def _common_path(self):
        owner = self.parent
        if owner is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return owner._common_path + '/Cisco-IOS-XR-tty-server-oper:configuration'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        if not self.is_config():
            return False
        child = self.connection_configuration
        return child is not None and child._has_data()

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
        return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration']['meta_info']
@property
def _common_path(self):
    # XPath-like data path for this AuxiliaryLine container, derived
    # from the parent AuxiliaryNode's path.
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:auxiliary-line'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
    # True when any child container of the auxiliary line carries data.
if not self.is_config():
return False
if self.auxiliary_statistics is not None and self.auxiliary_statistics._has_data():
return True
if self.configuration is not None and self.configuration._has_data():
return True
if self.state is not None and self.state._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Lazy import keeps the heavy meta table out of module import.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine']['meta_info']
@property
def _common_path(self):
    # AuxiliaryNode is a list entry keyed by 'id'; the key must be set
    # before a path can be built.
if self.id is None:
raise YPYModelError('Key property id is None')
return '/Cisco-IOS-XR-tty-server-oper:tty/Cisco-IOS-XR-tty-server-oper:auxiliary-nodes/Cisco-IOS-XR-tty-server-oper:auxiliary-node[Cisco-IOS-XR-tty-server-oper:id = ' + str(self.id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
    # A set key alone counts as data, as does a populated auxiliary line.
if not self.is_config():
return False
if self.id is not None:
return True
if self.auxiliary_line is not None and self.auxiliary_line._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode']['meta_info']
@property
def _common_path(self):
    # Top-level container path: no parent lookup needed.
return '/Cisco-IOS-XR-tty-server-oper:tty/Cisco-IOS-XR-tty-server-oper:auxiliary-nodes'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
    # True when any list entry in auxiliary_node carries data.
if not self.is_config():
return False
if self.auxiliary_node is not None:
for child_ref in self.auxiliary_node:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes']['meta_info']
@property
def _common_path(self):
    # Root container of the Cisco-IOS-XR-tty-server-oper model.
return '/Cisco-IOS-XR-tty-server-oper:tty'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
    # True when any of the three line-type containers carries data.
if not self.is_config():
return False
if self.auxiliary_nodes is not None and self.auxiliary_nodes._has_data():
return True
if self.console_nodes is not None and self.console_nodes._has_data():
return True
if self.vty_lines is not None and self.vty_lines._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty']['meta_info']
| 39.738568
| 269
| 0.434906
| 10,364
| 126,011
| 5.066577
| 0.027499
| 0.041897
| 0.052371
| 0.041345
| 0.926966
| 0.908817
| 0.875395
| 0.869511
| 0.862693
| 0.853152
| 0
| 0.016917
| 0.500393
| 126,011
| 3,170
| 270
| 39.751104
| 0.817158
| 0.216878
| 0
| 0.858166
| 0
| 0.004298
| 0.099113
| 0.057569
| 0
| 0
| 0
| 0
| 0
| 1
| 0.162607
| false
| 0
| 0.037249
| 0.002865
| 0.505014
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
ffe8e797ff507e4c149d85bd0df812957a4e0587
| 2,077
|
py
|
Python
|
mama_cas/migrations/0002_ticket.py
|
taoky/django-mama-cas
|
9fbca9c33bfb139da12d922f7583c27c654e4049
|
[
"BSD-3-Clause"
] | null | null | null |
mama_cas/migrations/0002_ticket.py
|
taoky/django-mama-cas
|
9fbca9c33bfb139da12d922f7583c27c654e4049
|
[
"BSD-3-Clause"
] | null | null | null |
mama_cas/migrations/0002_ticket.py
|
taoky/django-mama-cas
|
9fbca9c33bfb139da12d922f7583c27c654e4049
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 3.2.6 on 2021-09-02 10:28
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds audit fields (login_ip, username) to the three ticket models
    # and migrates their primary keys to BigAutoField (the Django 3.2
    # DEFAULT_AUTO_FIELD change). Operation order is significant; do not
    # reorder.
dependencies = [
('mama_cas', '0001_initial'),
]
operations = [
    # login_ip / username on ProxyGrantingTicket.
migrations.AddField(
model_name='proxygrantingticket',
name='login_ip',
field=models.GenericIPAddressField(blank=True, null=True, verbose_name='ip'),
),
migrations.AddField(
model_name='proxygrantingticket',
name='username',
field=models.CharField(blank=True, max_length=255, verbose_name='username'),
),
    # login_ip / username on ProxyTicket.
migrations.AddField(
model_name='proxyticket',
name='login_ip',
field=models.GenericIPAddressField(blank=True, null=True, verbose_name='ip'),
),
migrations.AddField(
model_name='proxyticket',
name='username',
field=models.CharField(blank=True, max_length=255, verbose_name='username'),
),
    # login_ip / username on ServiceTicket.
migrations.AddField(
model_name='serviceticket',
name='login_ip',
field=models.GenericIPAddressField(blank=True, null=True, verbose_name='ip'),
),
migrations.AddField(
model_name='serviceticket',
name='username',
field=models.CharField(blank=True, max_length=255, verbose_name='username'),
),
    # Primary-key widening to 64-bit auto fields.
migrations.AlterField(
model_name='proxygrantingticket',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='proxyticket',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='serviceticket',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
]
| 35.20339
| 111
| 0.597496
| 199
| 2,077
| 6.075377
| 0.266332
| 0.066998
| 0.114144
| 0.133995
| 0.832093
| 0.832093
| 0.738627
| 0.738627
| 0.738627
| 0.738627
| 0
| 0.018805
| 0.283101
| 2,077
| 58
| 112
| 35.810345
| 0.79315
| 0.021666
| 0
| 0.865385
| 1
| 0
| 0.117734
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019231
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
08178215b576ae3a998049998edb796d77b9eced
| 109
|
py
|
Python
|
partools/mail/__init__.py
|
paularnaud2/PyTools
|
09accc33e1dcfdde45671ad5962727554648b30c
|
[
"MIT"
] | null | null | null |
partools/mail/__init__.py
|
paularnaud2/PyTools
|
09accc33e1dcfdde45671ad5962727554648b30c
|
[
"MIT"
] | null | null | null |
partools/mail/__init__.py
|
paularnaud2/PyTools
|
09accc33e1dcfdde45671ad5962727554648b30c
|
[
"MIT"
] | null | null | null |
from .main import gmail
from .main import no_auth
from .main import outlook
from .functions import init_mail
| 21.8
| 32
| 0.816514
| 18
| 109
| 4.833333
| 0.555556
| 0.275862
| 0.482759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146789
| 109
| 4
| 33
| 27.25
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
08465bf2dc785a727396d083bf37deae0409c8f8
| 1,050
|
py
|
Python
|
red-scare/src/test_output.py
|
Sebastian-ba/DoDoBing
|
6edcc18de22ad76505d2c13ac6a207a2c274cc95
|
[
"MIT"
] | 3
|
2017-09-25T11:59:20.000Z
|
2017-11-20T12:55:21.000Z
|
red-scare/src/test_output.py
|
ITU-2019/DoDoBing
|
6edcc18de22ad76505d2c13ac6a207a2c274cc95
|
[
"MIT"
] | 6
|
2017-09-25T12:04:51.000Z
|
2017-11-13T07:51:40.000Z
|
red-scare/src/test_output.py
|
ITU-2019/DoDoBing
|
6edcc18de22ad76505d2c13ac6a207a2c274cc95
|
[
"MIT"
] | null | null | null |
from main import *
def test_output_1():
    """Smoke-test output() with send flag False: six identical calls succeed.

    The original body copy-pasted the same call into six variables
    (res..res5) and then printed each; a comprehension makes the same
    six calls in the same order, followed by the same six prints.
    """
    print()
    results = [output("Testfile", 43, True, 3, 2, 1, False, False)
               for _ in range(6)]
    for result in results:
        print(result)
def test_output_2():
    """Smoke-test output() with send flag True: six identical calls succeed.

    Mirrors test_output_1 but with the final argument True; the six
    copy-pasted variables are collapsed into a loop with identical
    call and print order.
    """
    print()
    results = [output("Testfile", 43, True, 3, 2, 1, False, True)
               for _ in range(6)]
    for result in results:
        print(result)
| 29.166667
| 61
| 0.569524
| 157
| 1,050
| 3.783439
| 0.127389
| 0.282828
| 0.323232
| 0.40404
| 0.929293
| 0.929293
| 0.929293
| 0.929293
| 0.929293
| 0.299663
| 0
| 0.105534
| 0.26
| 1,050
| 35
| 62
| 30
| 0.658945
| 0
| 0
| 0.482759
| 0
| 0
| 0.094768
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068966
| false
| 0
| 0.034483
| 0
| 0.103448
| 0.482759
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
4b4280b41cc95077e54f7dd66f0b715ae4c9fea2
| 2,770
|
py
|
Python
|
daq-gui/roi_window_ui.py
|
Sovichea/instrumentation-opendaq-rpi3
|
ed8b00bce131d8960ddc2a0d130c442b77a4a087
|
[
"MIT"
] | 1
|
2017-12-08T17:07:12.000Z
|
2017-12-08T17:07:12.000Z
|
daq-gui/roi_window_ui.py
|
Sovichea/instrumentation-opendaq-rpi3
|
ed8b00bce131d8960ddc2a0d130c442b77a4a087
|
[
"MIT"
] | null | null | null |
daq-gui/roi_window_ui.py
|
Sovichea/instrumentation-opendaq-rpi3
|
ed8b00bce131d8960ddc2a0d130c442b77a4a087
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'roi_window.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Python 2 compatibility shim: QString.fromUtf8 exists only in the old
# PyQt4 string API; fall back to the identity function otherwise.
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
# Translation shim: older PyQt4 builds take an explicit encoding flag
# (UnicodeUTF8); newer ones dropped that argument.
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
# Generated by pyuic4 from 'roi_window.ui' -- regenerate rather than edit.
class Ui_Form_roi(object):
    # Builds the widget tree: a single pyqtgraph PlotWidget inside a
    # vertical layout on the given form.
def setupUi(self, Form_roi):
Form_roi.setObjectName(_fromUtf8("Form_roi"))
Form_roi.resize(400, 300)
self.verticalLayout = QtGui.QVBoxLayout(Form_roi)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.graphicsView = PlotWidget(Form_roi)
self.graphicsView.setObjectName(_fromUtf8("graphicsView"))
self.verticalLayout.addWidget(self.graphicsView)
self.retranslateUi(Form_roi)
QtCore.QMetaObject.connectSlotsByName(Form_roi)
    # Re-applies the translatable strings (window title only).
def retranslateUi(self, Form_roi):
Form_roi.setWindowTitle(_translate("Form_roi", "Form", None))
from pyqtgraph import PlotWidget
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'roi_window.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# NOTE(review): this is a byte-for-byte duplicate of the block above --
# the file content appears twice in this dataset row.
# Python 2 compatibility shim: QString.fromUtf8 exists only in the old
# PyQt4 string API; fall back to the identity function otherwise.
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
# Translation shim: older PyQt4 builds take an explicit encoding flag.
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
# Generated by pyuic4 from 'roi_window.ui' -- regenerate rather than edit.
class Ui_Form_roi(object):
    # Builds the widget tree: one pyqtgraph PlotWidget in a vertical layout.
def setupUi(self, Form_roi):
Form_roi.setObjectName(_fromUtf8("Form_roi"))
Form_roi.resize(400, 300)
self.verticalLayout = QtGui.QVBoxLayout(Form_roi)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.graphicsView = PlotWidget(Form_roi)
self.graphicsView.setObjectName(_fromUtf8("graphicsView"))
self.verticalLayout.addWidget(self.graphicsView)
self.retranslateUi(Form_roi)
QtCore.QMetaObject.connectSlotsByName(Form_roi)
    # Re-applies the translatable strings (window title only).
def retranslateUi(self, Form_roi):
Form_roi.setWindowTitle(_translate("Form_roi", "Form", None))
from pyqtgraph import PlotWidget
| 33.373494
| 80
| 0.701805
| 308
| 2,770
| 6.155844
| 0.211039
| 0.088608
| 0.084388
| 0.118143
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.018165
| 0.205054
| 2,770
| 82
| 81
| 33.780488
| 0.84287
| 0.133213
| 0
| 1
| 1
| 0
| 0.039896
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.185185
| false
| 0
| 0.074074
| 0.111111
| 0.407407
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
4bafa487132743606b971ee250c9b2ed7e3fc942
| 484
|
py
|
Python
|
code/chapter_02/listing_02_03.py
|
guinslym/python_earth_science_book
|
f4dd0115dbbce140c6713989f630a71238daa72c
|
[
"MIT"
] | 80
|
2021-04-19T10:03:57.000Z
|
2022-03-30T15:34:47.000Z
|
code/chapter_02/listing_02_03.py
|
guinslym/python_earth_science_book
|
f4dd0115dbbce140c6713989f630a71238daa72c
|
[
"MIT"
] | null | null | null |
code/chapter_02/listing_02_03.py
|
guinslym/python_earth_science_book
|
f4dd0115dbbce140c6713989f630a71238daa72c
|
[
"MIT"
] | 23
|
2021-04-25T03:50:07.000Z
|
2022-03-22T03:06:19.000Z
|
# Book listing illustrating how indentation scopes nested if-blocks.
# NOTE(review): 'a' and 'b' are presumably assigned earlier in the
# chapter's listing -- TODO confirm against the book source.
# this instruction is always executed
# this instruction is always executed
# this instruction is always executed
if a == 1:
# this instruction is executed if a = 1
if b == 3:
# this instruction is executed if a = 1 and b = 3
# this instruction is executed if a = 1 and b = 3
# this instruction is executed if a = 1
# this instruction is executed if a = 1
# this instruction is executed if a = 1
# this instruction is always executed
| 32.266667
| 57
| 0.652893
| 74
| 484
| 4.27027
| 0.148649
| 0.474684
| 0.537975
| 0.265823
| 0.993671
| 0.949367
| 0.949367
| 0.949367
| 0.949367
| 0.949367
| 0
| 0.029412
| 0.297521
| 484
| 14
| 58
| 34.571429
| 0.9
| 0.81405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
4bbd260c0ec694c143844cf243aefc9245c52582
| 32,189
|
py
|
Python
|
tb_rest_client/api/api_ce/auth_controller_api.py
|
jernkuan/thingsboard-python-rest-client
|
3fb25272507494e6d494b27ca2380d3c543562e5
|
[
"Apache-2.0"
] | null | null | null |
tb_rest_client/api/api_ce/auth_controller_api.py
|
jernkuan/thingsboard-python-rest-client
|
3fb25272507494e6d494b27ca2380d3c543562e5
|
[
"Apache-2.0"
] | null | null | null |
tb_rest_client/api/api_ce/auth_controller_api.py
|
jernkuan/thingsboard-python-rest-client
|
3fb25272507494e6d494b27ca2380d3c543562e5
|
[
"Apache-2.0"
] | 1
|
2021-11-26T11:24:56.000Z
|
2021-11-26T11:24:56.000Z
|
# coding: utf-8
"""
ThingsBoard REST API
For instructions how to authorize requests please visit <a href='http://thingsboard.io/docs/reference/rest-api/'>REST API documentation page</a>. # noqa: E501
OpenAPI spec version: 2.0
Contact: info@thingsboard.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class AuthControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Create the controller, defaulting to a fresh ApiClient when none is given."""
    self.api_client = ApiClient() if api_client is None else api_client
def activate_user_using_post(self, body, **kwargs):  # noqa: E501
    """activateUser  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call:
    >>> thread = api.activate_user_using_post(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str body: activateRequest (required)
    :param bool send_activation_mail: sendActivationMail
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always yields the payload only, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.activate_user_using_post_with_http_info
    if kwargs.get('async_req'):
        return delegate(body, **kwargs)  # noqa: E501
    (data) = delegate(body, **kwargs)  # noqa: E501
    return data
def activate_user_using_post_with_http_info(self, body, **kwargs): # noqa: E501
"""activateUser # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.activate_user_using_post_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str body: activateRequest (required)
:param bool send_activation_mail: sendActivationMail
:return: str
If the method is called asynchronously,
returns the request thread.
"""
# Whitelist of endpoint parameters plus the generic transport options
# understood by ApiClient.call_api.
all_params = ['body', 'send_activation_mail'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
# Reject any keyword argument that is not in the whitelist.
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method activate_user_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `activate_user_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
# Optional query parameter; camelCase on the wire.
if 'send_activation_mail' in params:
query_params.append(('sendActivationMail', params['send_activation_mail'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
# Empty: activation is an unauthenticated (noauth) endpoint.
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/noauth/activate{?sendActivationMail}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def change_password_using_post(self, body, **kwargs):  # noqa: E501
    """changePassword  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call:
    >>> thread = api.change_password_using_post(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str body: changePasswordRequest (required)
    :return: ObjectNode
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: always return the payload only.
    kwargs['_return_http_data_only'] = True
    delegate = self.change_password_using_post_with_http_info
    if kwargs.get('async_req'):
        return delegate(body, **kwargs)  # noqa: E501
    (data) = delegate(body, **kwargs)  # noqa: E501
    return data
def change_password_using_post_with_http_info(self, body, **kwargs): # noqa: E501
"""changePassword # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.change_password_using_post_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str body: changePasswordRequest (required)
:return: ObjectNode
If the method is called asynchronously,
returns the request thread.
"""
# Whitelist of endpoint parameters plus generic transport options.
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
# Reject any keyword argument not in the whitelist.
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_password_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `change_password_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
# Requires the X-Authorization bearer token, unlike the noauth endpoints.
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/auth/changePassword', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ObjectNode', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def check_activate_token_using_get(self, activate_token, **kwargs):  # noqa: E501
    """checkActivateToken  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call:
    >>> thread = api.check_activate_token_using_get(activate_token, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str activate_token: activateToken (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: always return the payload only.
    kwargs['_return_http_data_only'] = True
    delegate = self.check_activate_token_using_get_with_http_info
    if kwargs.get('async_req'):
        return delegate(activate_token, **kwargs)  # noqa: E501
    (data) = delegate(activate_token, **kwargs)  # noqa: E501
    return data
def check_activate_token_using_get_with_http_info(self, activate_token, **kwargs): # noqa: E501
"""checkActivateToken # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_activate_token_using_get_with_http_info(activate_token, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str activate_token: activateToken (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
# Whitelist of endpoint parameters plus generic transport options.
all_params = ['activate_token'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
# Reject any keyword argument not in the whitelist.
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method check_activate_token_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'activate_token' is set
if ('activate_token' not in params or
params['activate_token'] is None):
raise ValueError("Missing the required parameter `activate_token` when calling `check_activate_token_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
# Required token travels as a query parameter (camelCase on the wire).
if 'activate_token' in params:
query_params.append(('activateToken', params['activate_token'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
# Empty: token check is an unauthenticated (noauth) endpoint.
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/noauth/activate{?activateToken}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def check_reset_token_using_get(self, reset_token, **kwargs):  # noqa: E501
    """checkResetToken  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call:
    >>> thread = api.check_reset_token_using_get(reset_token, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str reset_token: resetToken (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: always return the payload only.
    kwargs['_return_http_data_only'] = True
    delegate = self.check_reset_token_using_get_with_http_info
    if kwargs.get('async_req'):
        return delegate(reset_token, **kwargs)  # noqa: E501
    (data) = delegate(reset_token, **kwargs)  # noqa: E501
    return data
def check_reset_token_using_get_with_http_info(self, reset_token, **kwargs):  # noqa: E501
    """checkResetToken  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.check_reset_token_using_get_with_http_info(reset_token, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str reset_token: resetToken (required)
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when `reset_token` is None
    """
    all_params = ['reset_token', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    # Build the effective parameter dict explicitly.  The generated code
    # snapshotted locals() (fragile: any new local leaks into the dict)
    # and iterated via the third-party `six` shim; dict.items() behaves
    # identically on Python 2 and 3.
    params = {'reset_token': reset_token}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method check_reset_token_using_get" % key
            )
        params[key] = val
    # verify the required parameter 'reset_token' is set
    if params['reset_token'] is None:
        raise ValueError("Missing the required parameter `reset_token` when calling `check_reset_token_using_get`")  # noqa: E501

    query_params = [('resetToken', params['reset_token'])]  # noqa: E501
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
    }
    # Authentication setting: none -- this is a no-auth endpoint
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/noauth/resetPassword{?resetToken}', 'GET',
        {},   # path_params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_user_password_policy_using_get(self, **kwargs):  # noqa: E501
    """getUserPasswordPolicy  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_user_password_policy_using_get(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: UserPasswordPolicy
    If the method is called asynchronously,
    returns the request thread.
    """
    # Both the sync and async branches of the generated wrapper returned
    # the helper's result unchanged, so forward the call directly.
    kwargs['_return_http_data_only'] = True
    return self.get_user_password_policy_using_get_with_http_info(**kwargs)  # noqa: E501
def get_user_password_policy_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """getUserPasswordPolicy  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_user_password_policy_using_get_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: UserPasswordPolicy
    If the method is called asynchronously,
    returns the request thread.
    :raises TypeError: on an unrecognized keyword argument
    """
    all_params = ['async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    # Validate keyword arguments explicitly instead of snapshotting
    # locals() (fragile) and iterating via the third-party `six` shim.
    params = {}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_user_password_policy_using_get" % key
            )
        params[key] = val

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
    }
    # Authentication setting: none -- this is a no-auth endpoint
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/noauth/userPasswordPolicy', 'GET',
        {},   # path_params
        [],   # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='UserPasswordPolicy',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_user_using_get(self, **kwargs):  # noqa: E501
    """getUser  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_user_using_get(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: User
    If the method is called asynchronously,
    returns the request thread.
    """
    # Request payload-only mode and delegate; the generated code's
    # async/sync branches both returned the helper's result verbatim.
    kwargs['_return_http_data_only'] = True
    return self.get_user_using_get_with_http_info(**kwargs)  # noqa: E501
def get_user_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """getUser  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_user_using_get_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: User
    If the method is called asynchronously,
    returns the request thread.
    :raises TypeError: on an unrecognized keyword argument
    """
    all_params = ['async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    # Validate keyword arguments explicitly instead of snapshotting
    # locals() (fragile) and iterating via the third-party `six` shim.
    params = {}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_user_using_get" % key
            )
        params[key] = val

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
    }
    # Authentication setting: requires the X-Authorization JWT header
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/auth/user', 'GET',
        {},   # path_params
        [],   # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='User',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def logout_using_post(self, **kwargs):  # noqa: E501
    """logout  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.logout_using_post(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Delegate directly: the generated async/sync branches were
    # behaviorally identical (both returned the helper's result).
    kwargs['_return_http_data_only'] = True
    return self.logout_using_post_with_http_info(**kwargs)  # noqa: E501
def logout_using_post_with_http_info(self, **kwargs):  # noqa: E501
    """logout  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.logout_using_post_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    :raises TypeError: on an unrecognized keyword argument
    """
    all_params = ['async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    # Validate keyword arguments explicitly instead of snapshotting
    # locals() (fragile) and iterating via the third-party `six` shim.
    params = {}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method logout_using_post" % key
            )
        params[key] = val

    # No Accept header is sent for this endpoint (response has no body).
    header_params = {}
    # Authentication setting: requires the X-Authorization JWT header
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/auth/logout', 'POST',
        {},   # path_params
        [],   # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def request_reset_password_by_email_using_post(self, body, **kwargs):  # noqa: E501
    """requestResetPasswordByEmail  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.request_reset_password_by_email_using_post(body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str body: resetPasswordByEmailRequest (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Forward to the low-level helper in payload-only mode; the generated
    # wrapper's two branches both returned the helper's result as-is.
    kwargs['_return_http_data_only'] = True
    return self.request_reset_password_by_email_using_post_with_http_info(
        body, **kwargs)  # noqa: E501
def request_reset_password_by_email_using_post_with_http_info(self, body, **kwargs):  # noqa: E501
    """requestResetPasswordByEmail  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.request_reset_password_by_email_using_post_with_http_info(body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str body: resetPasswordByEmailRequest (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when `body` is None
    """
    all_params = ['body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    # Build the effective parameter dict explicitly.  The generated code
    # snapshotted locals() (fragile) and iterated via the third-party
    # `six` shim; dict.items() behaves identically on Python 2 and 3.
    params = {'body': body}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method request_reset_password_by_email_using_post" % key
            )
        params[key] = val
    # verify the required parameter 'body' is set
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `request_reset_password_by_email_using_post`")  # noqa: E501

    header_params = {
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }
    # Authentication setting: none -- this is a no-auth endpoint
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/noauth/resetPasswordByEmail', 'POST',
        {},   # path_params
        [],   # query_params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def reset_password_using_post(self, body, **kwargs):  # noqa: E501
    """resetPassword  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.reset_password_using_post(body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str body: resetPasswordRequest (required)
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Payload-only delegation; the generated wrapper's async and sync
    # branches were identical apart from a redundant local binding.
    kwargs['_return_http_data_only'] = True
    return self.reset_password_using_post_with_http_info(
        body, **kwargs)  # noqa: E501
def reset_password_using_post_with_http_info(self, body, **kwargs):  # noqa: E501
    """resetPassword  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.reset_password_using_post_with_http_info(body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str body: resetPasswordRequest (required)
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when `body` is None
    """
    all_params = ['body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    # Build the effective parameter dict explicitly.  The generated code
    # snapshotted locals() (fragile) and iterated via the third-party
    # `six` shim; dict.items() behaves identically on Python 2 and 3.
    params = {'body': body}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method reset_password_using_post" % key
            )
        params[key] = val
    # verify the required parameter 'body' is set
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `reset_password_using_post`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }
    # Authentication setting: none -- this is a no-auth endpoint
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/noauth/resetPassword', 'POST',
        {},   # path_params
        [],   # query_params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 37.472643
| 163
| 0.608873
| 3,634
| 32,189
| 5.094386
| 0.051458
| 0.045806
| 0.027224
| 0.035002
| 0.952466
| 0.941285
| 0.931346
| 0.920758
| 0.910765
| 0.900394
| 0
| 0.014506
| 0.301842
| 32,189
| 858
| 164
| 37.516317
| 0.809282
| 0.302401
| 0
| 0.799145
| 0
| 0
| 0.172346
| 0.058427
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040598
| false
| 0.061966
| 0.008547
| 0
| 0.108974
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
29a86b74695876b3b452671dda5339b5c1b92205
| 1,601
|
py
|
Python
|
test/test_add_group.py
|
ruslankl9/python_training
|
7bcaf2606a80935a4a0c458af4e6a078f241fb38
|
[
"Apache-2.0"
] | null | null | null |
test/test_add_group.py
|
ruslankl9/python_training
|
7bcaf2606a80935a4a0c458af4e6a078f241fb38
|
[
"Apache-2.0"
] | null | null | null |
test/test_add_group.py
|
ruslankl9/python_training
|
7bcaf2606a80935a4a0c458af4e6a078f241fb38
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from model.group import Group
import pytest
def test_add_group_data(app, db, data_groups, check_ui):
    """Add a group taken from file-based test data and verify the result.

    Verifies against the DB group list, and additionally against the UI
    group list when the check_ui fixture is enabled.
    """
    group = data_groups
    with pytest.allure.step('Given a group list'):
        old_groups = db.get_group_list()
    with pytest.allure.step('When I add a group %s to the list' % group):
        app.group.create(group)
    with pytest.allure.step('Then the new group list is equal to the old list with the added group'):
        new_groups = db.get_group_list()
        old_groups.append(group)
        # Consistency fix: use the class-level key Group.id_or_max (as the
        # UI comparison below already does).  The original passed the
        # bound method group.id_or_max, which fails with a TypeError if
        # id_or_max is an ordinary instance method taking only `self`.
        assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
        if check_ui:
            assert sorted(map(lambda x: Group(id=x.id, name=x.name.strip()), new_groups), key=Group.id_or_max) == \
                sorted(app.group.get_group_list(), key=Group.id_or_max)
def test_add_group_json(app, db, json_groups, check_ui):
    """Add a group taken from JSON test data and verify the result.

    Verifies against the DB group list, and additionally against the UI
    group list when the check_ui fixture is enabled.
    """
    group = json_groups
    with pytest.allure.step('Given a group list'):
        old_groups = db.get_group_list()
    with pytest.allure.step('When I add a group %s to the list' % group):
        app.group.create(group)
    with pytest.allure.step('Then the new group list is equal to the old list with the added group'):
        new_groups = db.get_group_list()
        old_groups.append(group)
        # Consistency fix: use the class-level key Group.id_or_max (as the
        # UI comparison below already does).  The original passed the
        # bound method group.id_or_max, which fails with a TypeError if
        # id_or_max is an ordinary instance method taking only `self`.
        assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
        if check_ui:
            assert sorted(map(lambda x: Group(id=x.id, name=x.name.strip()), new_groups), key=Group.id_or_max) == \
                sorted(app.group.get_group_list(), key=Group.id_or_max)
| 48.515152
| 115
| 0.670206
| 262
| 1,601
| 3.889313
| 0.19084
| 0.088322
| 0.078508
| 0.09421
| 0.855741
| 0.855741
| 0.855741
| 0.855741
| 0.855741
| 0.855741
| 0
| 0.000793
| 0.212367
| 1,601
| 33
| 116
| 48.515152
| 0.807296
| 0.013117
| 0
| 0.785714
| 0
| 0
| 0.151995
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 1
| 0.071429
| false
| 0
| 0.071429
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
29bcd6aa3f57e6f463eca65febe9064b0eac349f
| 23,868
|
py
|
Python
|
tests/test_IP_drawer.py
|
dmulyalin/N2G
|
78c7395d0c08b45183c1951857a2fb8b2ead7fda
|
[
"MIT"
] | 84
|
2020-07-13T22:00:03.000Z
|
2022-03-24T03:35:03.000Z
|
tests/test_IP_drawer.py
|
dmulyalin/N2G
|
78c7395d0c08b45183c1951857a2fb8b2ead7fda
|
[
"MIT"
] | 14
|
2020-10-08T04:56:01.000Z
|
2022-03-12T17:32:59.000Z
|
tests/test_IP_drawer.py
|
dmulyalin/N2G
|
78c7395d0c08b45183c1951857a2fb8b2ead7fda
|
[
"MIT"
] | 12
|
2021-01-18T00:01:16.000Z
|
2022-03-14T09:09:28.000Z
|
import sys
sys.path.insert(0,'..')
# after updated sys path, can do N2G import from parent dir
from N2G import drawio_diagram as create_drawio_diagram
from N2G import yed_diagram as create_yed_diagram
from N2G import ip_drawer
def test_ip_drawing_yed_data_dict_base():
    """Render a yEd IP diagram from raw Cisco IOS config text (defaults).

    Feeds two devices' `show run interfaces` output to ip_drawer with an
    empty options dict and compares the produced .graphml byte-for-byte
    against a stored reference file.
    """
    # Input is keyed by platform name; each list item is one device's CLI output.
    data = {"Cisco_IOS": ["""
switch_1# show run interfaces
interface Loopback0
description Routing Loopback
ip address 10.123.0.4 255.255.255.255
!
interface TenGigabitEthernet1/1/3
description to SWITCH_2 vrf VRF1 link 1
vrf forwarding VRF2
ip address 10.123.2.3 255.255.255.254
!
interface TenGigabitEthernet1/1/4
description to SWITCH_2 vrf VRF1 link 2
vrf forwarding VRF2
ip address 10.123.2.4 255.255.255.254
!
interface TenGigabitEthernet1/1/5
description to SWITCH_3
ip address 10.1.33.1 255.255.255.0
!
interface TenGigabitEthernet1/1/7
description to SWITCH_2 shared subnet
ip address 10.1.234.1 255.255.255.0
!
interface Vlan123
description Workstations Vlan
vrf forwarding CORP
ip address 10.123.111.1 255.255.255.0
ip address 10.123.222.1 255.255.255.0 secondary
ip address 10.123.233.1 255.255.255.0 secondary
""",
    """
switch_2# show run interfaces
interface Loopback0
description Routing Loopback
ip address 10.123.0.5 255.255.255.255
!
interface GigabitEthernet1/3
description to SWITCH_1 link 1
vrf forwarding VRF1
ip address 10.123.2.2 255.255.255.254
!
interface GigabitEthernet1/4
description to SWITCH_1 links 2
vrf forwarding VRF1
ip address 10.123.2.4 255.255.255.254
!
interface TenGigabitEthernet1/1/71
description to SWITCH_2 shared subnet
ip address 10.1.234.2 255.255.255.0
!
interface Vlan11
description Workstations Vlan
vrf forwarding Staff Workstations
ip address 10.11.11.1 255.255.255.0
!
interface Vlan22
description Workstations Vlan
vrf forwarding Staff Phones
ip address 10.22.22.1 255.255.255.0
"""]
    }
    # Default drawer options.
    config = {}
    drawing = create_yed_diagram()
    drawer = ip_drawer(drawing, config)
    drawer.work(data)
    # NOTE(review): writes into ./Output/ relative to the CWD and assumes
    # the "should_be_*" reference file already exists there -- confirm
    # when running outside the repository's tests directory.
    drawer.drawing.dump_file(filename="test_ip_drawing_yed_data_dict_base.graphml", folder="./Output/")
    with open ("./Output/test_ip_drawing_yed_data_dict_base.graphml") as produced:
        with open("./Output/should_be_test_ip_drawing_yed_data_dict_base.graphml") as should_be:
            assert produced.read() == should_be.read()
# test_ip_drawing_yed_data_dict_base()
def test_ip_drawing_yed_data_dict_group_links():
    """Render a yEd IP diagram with parallel links grouped into one edge.

    Same two-device Cisco IOS input as the base test, with the
    "group_links" drawer option enabled; output compared against a stored
    reference .graphml file.
    """
    data = {"Cisco_IOS": ["""
switch_1# show run interfaces
interface Loopback0
description Routing Loopback
ip address 10.123.0.4 255.255.255.255
!
interface TenGigabitEthernet1/1/3
description to SWITCH_2 vrf VRF1 link 1
vrf forwarding VRF2
ip address 10.123.2.3 255.255.255.254
!
interface TenGigabitEthernet1/1/4
description to SWITCH_2 vrf VRF1 link 2
vrf forwarding VRF2
ip address 10.123.2.4 255.255.255.254
!
interface TenGigabitEthernet1/1/5
description to SWITCH_3
ip address 10.1.33.1 255.255.255.0
!
interface TenGigabitEthernet1/1/7
description to SWITCH_2 shared subnet
ip address 10.1.234.1 255.255.255.0
!
interface Vlan123
description Workstations Vlan
vrf forwarding CORP
ip address 10.123.111.1 255.255.255.0
ip address 10.123.222.1 255.255.255.0 secondary
ip address 10.123.233.1 255.255.255.0 secondary
""",
    """
switch_2# show run interfaces
interface Loopback0
description Routing Loopback
ip address 10.123.0.5 255.255.255.255
!
interface GigabitEthernet1/3
description to SWITCH_1 link 1
vrf forwarding VRF1
ip address 10.123.2.2 255.255.255.254
!
interface GigabitEthernet1/4
description to SWITCH_1 links 2
vrf forwarding VRF1
ip address 10.123.2.5 255.255.255.254
!
interface TenGigabitEthernet1/1/71
description to SWITCH_2 shared subnet
ip address 10.1.234.2 255.255.255.0
!
interface Vlan11
description Workstations Vlan
vrf forwarding Staff Workstations
ip address 10.11.11.1 255.255.255.0
!
interface Vlan22
description Workstations Vlan
vrf forwarding Staff Phones
ip address 10.22.22.1 255.255.255.0
"""]
    }
    # Collapse multiple links between the same pair of nodes into one.
    config = {
        "group_links": True
    }
    drawing = create_yed_diagram()
    drawer = ip_drawer(drawing, config)
    drawer.work(data)
    # NOTE(review): relies on ./Output/ and the stored reference file.
    drawer.drawing.dump_file(filename="test_ip_drawing_yed_data_dict_group_links.graphml", folder="./Output/")
    with open ("./Output/test_ip_drawing_yed_data_dict_group_links.graphml") as produced:
        with open("./Output/should_be_test_ip_drawing_yed_data_dict_group_links.graphml") as should_be:
            assert produced.read() == should_be.read()
# test_ip_drawing_yed_data_dict_group_links()
def test_ip_drawing_yed_data_dict_add_vrf_link_label():
    """Render a yEd IP diagram with VRF names added as link labels.

    Same two-device Cisco IOS input, with the "label_vrf" drawer option
    enabled; output compared against a stored reference .graphml file.
    """
    data = {"Cisco_IOS": ["""
switch_1# show run interfaces
interface Loopback0
description Routing Loopback
ip address 10.123.0.4 255.255.255.255
!
interface TenGigabitEthernet1/1/3
description to SWITCH_2 vrf VRF1 link 1
vrf forwarding VRF2
ip address 10.123.2.3 255.255.255.254
!
interface TenGigabitEthernet1/1/4
description to SWITCH_2 vrf VRF1 link 2
vrf forwarding VRF2
ip address 10.123.2.4 255.255.255.254
!
interface TenGigabitEthernet1/1/5
description to SWITCH_3
ip address 10.1.33.1 255.255.255.0
!
interface TenGigabitEthernet1/1/7
description to SWITCH_2 shared subnet
ip address 10.1.234.1 255.255.255.0
!
interface Vlan123
description Workstations Vlan
vrf forwarding CORP
ip address 10.123.111.1 255.255.255.0
ip address 10.123.222.1 255.255.255.0 secondary
ip address 10.123.233.1 255.255.255.0 secondary
""",
    """
switch_2# show run interfaces
interface Loopback0
description Routing Loopback
ip address 10.123.0.5 255.255.255.255
!
interface GigabitEthernet1/3
description to SWITCH_1 link 1
vrf forwarding VRF1
ip address 10.123.2.2 255.255.255.254
!
interface GigabitEthernet1/4
description to SWITCH_1 links 2
vrf forwarding VRF1
ip address 10.123.2.5 255.255.255.254
!
interface TenGigabitEthernet1/1/71
description to SWITCH_2 shared subnet
ip address 10.1.234.2 255.255.255.0
!
interface Vlan11
description Workstations Vlan
vrf forwarding Staff Workstations
ip address 10.11.11.1 255.255.255.0
!
interface Vlan22
description Workstations Vlan
vrf forwarding Staff Phones
ip address 10.22.22.1 255.255.255.0
"""]
    }
    # Add VRF names to link labels.
    config = {
        "label_vrf": True
    }
    drawing = create_yed_diagram()
    drawer = ip_drawer(drawing, config)
    drawer.work(data)
    # NOTE(review): relies on ./Output/ and the stored reference file.
    drawer.drawing.dump_file(filename="test_ip_drawing_yed_data_dict_add_vrf_link_label.graphml", folder="./Output/")
    with open ("./Output/test_ip_drawing_yed_data_dict_add_vrf_link_label.graphml") as produced:
        with open("./Output/should_be_test_ip_drawing_yed_data_dict_add_vrf_link_label.graphml") as should_be:
            assert produced.read() == should_be.read()
def test_ip_drawing_yed_data_dict_add_interface_link_label():
    """Render a yEd IP diagram with interface names added as link labels.

    Same two-device Cisco IOS input, with the "label_interface" drawer
    option enabled; output compared against a stored reference .graphml
    file.
    """
    data = {"Cisco_IOS": ["""
switch_1# show run interfaces
interface Loopback0
description Routing Loopback
ip address 10.123.0.4 255.255.255.255
!
interface TenGigabitEthernet1/1/3
description to SWITCH_2 vrf VRF1 link 1
vrf forwarding VRF2
ip address 10.123.2.3 255.255.255.254
!
interface TenGigabitEthernet1/1/4
description to SWITCH_2 vrf VRF1 link 2
vrf forwarding VRF2
ip address 10.123.2.4 255.255.255.254
!
interface TenGigabitEthernet1/1/5
description to SWITCH_3
ip address 10.1.33.1 255.255.255.0
!
interface TenGigabitEthernet1/1/7
description to SWITCH_2 shared subnet
ip address 10.1.234.1 255.255.255.0
!
interface Vlan123
description Workstations Vlan
vrf forwarding CORP
ip address 10.123.111.1 255.255.255.0
ip address 10.123.222.1 255.255.255.0 secondary
ip address 10.123.233.1 255.255.255.0 secondary
""",
    """
switch_2# show run interfaces
interface Loopback0
description Routing Loopback
ip address 10.123.0.5 255.255.255.255
!
interface GigabitEthernet1/3
description to SWITCH_1 link 1
vrf forwarding VRF1
ip address 10.123.2.2 255.255.255.254
!
interface GigabitEthernet1/4
description to SWITCH_1 links 2
vrf forwarding VRF1
ip address 10.123.2.5 255.255.255.254
!
interface TenGigabitEthernet1/1/71
description to SWITCH_2 shared subnet
ip address 10.1.234.2 255.255.255.0
!
interface Vlan11
description Workstations Vlan
vrf forwarding Staff Workstations
ip address 10.11.11.1 255.255.255.0
!
interface Vlan22
description Workstations Vlan
vrf forwarding Staff Phones
ip address 10.22.22.1 255.255.255.0
"""]
    }
    # Add interface names to link labels.
    config = {
        "label_interface": True
    }
    drawing = create_yed_diagram()
    drawer = ip_drawer(drawing, config)
    drawer.work(data)
    # NOTE(review): relies on ./Output/ and the stored reference file.
    drawer.drawing.dump_file(filename="test_ip_drawing_yed_data_dict_add_interface_link_label.graphml", folder="./Output/")
    with open ("./Output/test_ip_drawing_yed_data_dict_add_interface_link_label.graphml") as produced:
        with open("./Output/should_be_test_ip_drawing_yed_data_dict_add_interface_link_label.graphml") as should_be:
            assert produced.read() == should_be.read()
def test_ip_drawing_yed_data_dict_add_interface_vrf_link_label():
    """Render a yEd IP diagram with both interface and VRF link labels.

    Same two-device Cisco IOS input, with both "label_interface" and
    "label_vrf" drawer options enabled; output compared against a stored
    reference .graphml file.
    """
    data = {"Cisco_IOS": ["""
switch_1# show run interfaces
interface Loopback0
description Routing Loopback
ip address 10.123.0.4 255.255.255.255
!
interface TenGigabitEthernet1/1/3
description to SWITCH_2 vrf VRF1 link 1
vrf forwarding VRF2
ip address 10.123.2.3 255.255.255.254
!
interface TenGigabitEthernet1/1/4
description to SWITCH_2 vrf VRF1 link 2
vrf forwarding VRF2
ip address 10.123.2.4 255.255.255.254
!
interface TenGigabitEthernet1/1/5
description to SWITCH_3
ip address 10.1.33.1 255.255.255.0
!
interface TenGigabitEthernet1/1/7
description to SWITCH_2 shared subnet
ip address 10.1.234.1 255.255.255.0
!
interface Vlan123
description Workstations Vlan
vrf forwarding CORP
ip address 10.123.111.1 255.255.255.0
ip address 10.123.222.1 255.255.255.0 secondary
ip address 10.123.233.1 255.255.255.0 secondary
""",
    """
switch_2# show run interfaces
interface Loopback0
description Routing Loopback
ip address 10.123.0.5 255.255.255.255
!
interface GigabitEthernet1/3
description to SWITCH_1 link 1
vrf forwarding VRF1
ip address 10.123.2.2 255.255.255.254
!
interface GigabitEthernet1/4
description to SWITCH_1 links 2
vrf forwarding VRF1
ip address 10.123.2.5 255.255.255.254
!
interface TenGigabitEthernet1/1/71
description to SWITCH_2 shared subnet
ip address 10.1.234.2 255.255.255.0
!
interface Vlan11
description Workstations Vlan
vrf forwarding Staff Workstations
ip address 10.11.11.1 255.255.255.0
!
interface Vlan22
description Workstations Vlan
vrf forwarding Staff Phones
ip address 10.22.22.1 255.255.255.0
"""]
    }
    # Add both interface names and VRF names to link labels.
    config = {
        "label_interface": True,
        "label_vrf": True
    }
    drawing = create_yed_diagram()
    drawer = ip_drawer(drawing, config)
    drawer.work(data)
    # NOTE(review): relies on ./Output/ and the stored reference file.
    drawer.drawing.dump_file(filename="test_ip_drawing_yed_data_dict_add_interface_vrf_link_label.graphml", folder="./Output/")
    with open ("./Output/test_ip_drawing_yed_data_dict_add_interface_vrf_link_label.graphml") as produced:
        with open("./Output/should_be_test_ip_drawing_yed_data_dict_add_interface_vrf_link_label.graphml") as should_be:
            assert produced.read() == should_be.read()
def test_ip_drawing_yed_data_dict_add_arp():
    """Render a yEd IP diagram including ARP-learned hosts.

    Input combines `show run interfaces` with `show ip arp` output for
    two devices; the "add_arp" drawer option pulls ARP entries into the
    diagram.  Output compared against a stored reference .graphml file.
    """
    data = {"Cisco_IOS": ["""
switch_1# show run interfaces
interface TenGigabitEthernet1/1/5
description to SWITCH_3
ip address 10.1.33.1 255.255.255.0
!
interface TenGigabitEthernet1/1/7
description to SWITCH_2 shared subnet
ip address 10.1.234.1 255.255.255.0
!
interface Vlan123
description Workstations Vlan
vrf forwarding CORP
ip address 10.123.111.1 255.255.255.0
ip address 10.123.222.1 255.255.255.0 secondary
ip address 10.123.233.1 255.255.255.0 secondary
switch_1# show ip arp
Internet 10.123.111.1 - d094.6643.1111 ARPA Vlan123
Internet 10.123.111.3 0 0008.e3ff.1333 ARPA Vlan123
Internet 10.123.111.4 106 d867.d9b7.1444 ARPA Vlan123
Internet 10.123.111.5 106 d867.d9b7.1555 ARPA Vlan123
Internet 10.123.233.1 - 0008.e3ff.2111 ARPA Vlan123
Internet 10.123.233.3 166 d867.d9b7.2333 ARPA Vlan123
Internet 10.123.233.4 31 0008.e3ff.2444 ARPA Vlan123
Internet 10.123.233.6 31 0008.e3ff.2666 ARPA Vlan123
Internet 10.1.234.1 - d867.d9b7.1111 ARPA TenGigabitEthernet1/1/7
Internet 10.1.234.2 31 0008.e3ff.1234 ARPA TenGigabitEthernet1/1/7
Internet 10.1.234.3 31 0008.e3ff.4321 ARPA TenGigabitEthernet1/1/7
""",
    """
switch_2# show run interfaces
interface TenGigabitEthernet1/1/71
description to SWITCH_2 shared subnet
ip address 10.1.234.2 255.255.255.0
!
interface Vlan11
description Workstations Vlan
vrf forwarding Staff Workstations
ip address 10.11.11.1 255.255.255.0
!
interface Vlan22
description Workstations Vlan
vrf forwarding Staff Phones
ip address 10.22.22.1 255.255.255.0
switch_2# show ip arp
Internet 10.22.22.1 - d094.7890.1111 ARPA Vlan22
Internet 10.22.22.3 0 0008.7890.1333 ARPA Vlan22
Internet 10.22.22.4 106 d867.7890.1444 ARPA Vlan22
Internet 10.1.234.1 5 d867.d9b7.1111 ARPA TenGigabitEthernet1/1/71
Internet 10.1.234.2 - 0008.e3ff.1234 ARPA TenGigabitEthernet1/1/71
Internet 10.1.234.3 78 0008.e3ff.4321 ARPA TenGigabitEthernet1/1/71
"""]
    }
    # Include ARP-table entries as hosts on the diagram.
    config = {
        "add_arp": True
    }
    drawing = create_yed_diagram()
    drawer = ip_drawer(drawing, config)
    drawer.work(data)
    # NOTE(review): relies on ./Output/ and the stored reference file.
    drawer.drawing.dump_file(filename="test_ip_drawing_yed_data_dict_add_arp.graphml", folder="./Output/")
    with open ("./Output/test_ip_drawing_yed_data_dict_add_arp.graphml") as produced:
        with open("./Output/should_be_test_ip_drawing_yed_data_dict_add_arp.graphml") as should_be:
            assert produced.read() == should_be.read()
# test_ip_drawing_yed_data_dict_add_arp()
def test_ip_drawing_yed_data_dict_add_arp_and_fhrp():
    """Golden-file test: IOS data with both ARP and FHRP processing enabled.

    Feeds two simulated Cisco IOS device outputs ("show run interfaces"
    followed by "show ip arp", including HSRP "standby" lines and the
    virtual-IP 10.1.234.99 ARP entry) to ip_drawer with ``add_arp`` and
    ``add_fhrp`` turned on, dumps the resulting yEd graphml file to
    ./Output/ and compares it byte-for-byte against a stored reference
    file (``should_be_*.graphml``).

    ``create_yed_diagram`` and ``ip_drawer`` are provided elsewhere in
    this file (module under test).
    """
    # Device outputs keyed by platform name; each list item is one device's
    # combined running-config + ARP table capture.
    data = {"Cisco_IOS": ["""
switch_1# show run interfaces
interface TenGigabitEthernet1/1/5
description to SWITCH_3
ip address 10.1.33.1 255.255.255.0
!
interface TenGigabitEthernet1/1/3
description to SWITCH_2 vrf VRF1 link 1
vrf forwarding VRF2
ip address 10.123.2.3 255.255.255.254
!
interface TenGigabitEthernet1/1/4
description to SWITCH_2 vrf VRF1 link 2
vrf forwarding VRF2
ip address 10.123.2.4 255.255.255.254
!
interface TenGigabitEthernet1/1/7
description to SWITCH_2 shared subnet
ip address 10.1.234.1 255.255.255.0
standby 1 10.1.234.99
!
interface Vlan123
description Workstations Vlan
vrf forwarding CORP
ip address 10.123.111.1 255.255.255.0
ip address 10.123.222.1 255.255.255.0 secondary
ip address 10.123.233.1 255.255.255.0 secondary
switch_1# show ip arp
Internet 10.123.111.1 - d094.6643.1111 ARPA Vlan123
Internet 10.123.111.3 0 0008.e3ff.1333 ARPA Vlan123
Internet 10.123.111.4 106 d867.d9b7.1444 ARPA Vlan123
Internet 10.123.111.5 106 d867.d9b7.1555 ARPA Vlan123
Internet 10.123.233.1 - 0008.e3ff.2111 ARPA Vlan123
Internet 10.123.233.3 166 d867.d9b7.2333 ARPA Vlan123
Internet 10.123.233.4 31 0008.e3ff.2444 ARPA Vlan123
Internet 10.123.233.6 31 0008.e3ff.2666 ARPA Vlan123
Internet 10.1.234.1 - d867.d9b7.1111 ARPA TenGigabitEthernet1/1/7
Internet 10.1.234.2 31 0008.e3ff.1234 ARPA TenGigabitEthernet1/1/7
Internet 10.1.234.3 31 0008.e3ff.4321 ARPA TenGigabitEthernet1/1/7
Internet 10.1.234.99 - 00ac.0007.001a ARPA TenGigabitEthernet1/1/7
""",
"""
switch_2# show run interfaces
interface TenGigabitEthernet1/1/71
description to SWITCH_2 shared subnet
ip address 10.1.234.2 255.255.255.0
standby 1 10.1.234.99
!
interface GigabitEthernet1/3
description to SWITCH_1 link 1
vrf forwarding VRF1
ip address 10.123.2.2 255.255.255.254
!
interface GigabitEthernet1/4
description to SWITCH_1 links 2
vrf forwarding VRF1
ip address 10.123.2.5 255.255.255.254
!
interface Vlan11
description Workstations Vlan
vrf forwarding Staff Workstations
ip address 10.11.11.1 255.255.255.0
!
interface Vlan22
description Workstations Vlan
vrf forwarding Staff Phones
ip address 10.22.22.1 255.255.255.0
switch_2# show ip arp
Internet 10.22.22.1 - d094.7890.1111 ARPA Vlan22
Internet 10.22.22.3 0 0008.7890.1333 ARPA Vlan22
Internet 10.22.22.4 106 d867.7890.1444 ARPA Vlan22
Internet 10.1.234.1 5 d867.d9b7.1111 ARPA TenGigabitEthernet1/1/71
Internet 10.1.234.2 - 0008.e3ff.1234 ARPA TenGigabitEthernet1/1/71
Internet 10.1.234.3 78 0008.e3ff.4321 ARPA TenGigabitEthernet1/1/71
Internet 10.1.234.99 5 00ac.0007.001a ARPA TenGigabitEthernet1/1/71
"""]
}
    # Drawer options: include ARP-derived nodes and FHRP virtual IPs.
    config = {
        "add_arp": True,
        "add_fhrp": True,
        # "collapse_ptp": True
    }
    drawing = create_yed_diagram()
    drawer = ip_drawer(drawing, config)
    drawer.work(data)
    # Render the diagram, then diff the produced graphml against the
    # checked-in expected output.
    drawer.drawing.dump_file(filename="test_ip_drawing_yed_data_dict_add_arp_and_fhrp.graphml", folder="./Output/")
    with open ("./Output/test_ip_drawing_yed_data_dict_add_arp_and_fhrp.graphml") as produced:
        with open("./Output/should_be_test_ip_drawing_yed_data_dict_add_arp_and_fhrp.graphml") as should_be:
            assert produced.read() == should_be.read()
# test_ip_drawing_yed_data_dict_add_arp_and_fhrp()
def test_ip_drawing_yed_data_dict_nxos():
    """Golden-file test: Cisco NX-OS data with ARP and FHRP enabled.

    Feeds three simulated NX-OS device outputs (interface config with
    HSRP and VRRPv3 groups, plus one "show ip arp vrf all" capture) to
    ip_drawer, dumps the yEd graphml to ./Output/ and compares it
    byte-for-byte against the stored reference file.

    ``create_yed_diagram`` and ``ip_drawer`` are provided elsewhere in
    this file (module under test).
    """
    # Device outputs keyed by platform name; switch_2/switch_3 have no
    # ARP capture, only configuration.
    data = {"Cisco_NXOS": ["""
switch_1# show run | sec interface
interface Vlan133
description OOB
vrf member MGMT_OOB
ip address 10.133.137.2/24
hsrp 133
preempt
ip 10.133.137.1
!
interface Vlan134
description OOB-2
vrf member MGMT_OOB
ip address 10.134.137.2/24
vrrpv3 1334 address-family ipv4
address 10.134.137.1 primary
!
interface Vlan222
description PTP OSPF Routing pat to siwtch2
ip address 10.222.137.1/30
!
interface Vlan223
description PTP OSPF Routing pat to siwtch3
ip address 10.223.137.1/30
switch_1# show ip arp vrf all
10.133.137.2 - d094.7890.1111 Vlan133
10.133.137.1 - d094.7890.1111 Vlan133
10.133.137.30 - d094.7890.1234 Vlan133
10.133.137.91 - d094.7890.4321 Vlan133
10.134.137.1 - d094.7890.1111 Vlan134
10.134.137.2 - d094.7890.1111 Vlan134
10.134.137.3 90 d094.7890.2222 Vlan134
10.134.137.31 91 d094.7890.beef Vlan134
10.134.137.81 81 d094.7890.feeb Vlan134
10.222.137.2 21 d094.7890.2222 Vlan222
""",
"""
switch_2# show run | sec interface
interface Vlan134
description OOB-2
vrf member MGMT_OOB
ip address 10.134.137.3/24
vrrpv3 1334 address-family ipv4
address 10.134.137.1 primary
!
interface Vlan222
description PTP OSPF Routing pat to siwtch1
ip address 10.222.137.2/30
""",
"""
switch_3# show run | sec interface
interface Vlan223
description PTP OSPF Routing pat to siwtch1
ip address 10.223.137.2/30
"""]
}
    # Drawer options: include ARP-derived nodes and FHRP virtual IPs.
    config = {
        "add_arp": True,
        "add_fhrp": True
    }
    drawing = create_yed_diagram()
    drawer = ip_drawer(drawing, config)
    drawer.work(data)
    # Render the diagram, then diff the produced graphml against the
    # checked-in expected output.
    drawer.drawing.dump_file(filename="test_ip_drawing_yed_data_dict_nxos.graphml", folder="./Output/")
    with open ("./Output/test_ip_drawing_yed_data_dict_nxos.graphml") as produced:
        with open("./Output/should_be_test_ip_drawing_yed_data_dict_nxos.graphml") as should_be:
            assert produced.read() == should_be.read()
def test_ip_drawing_yed_data_dict_huawei():
    """Golden-file test: Huawei data with ARP and FHRP (VRRP) enabled.

    Feeds three simulated Huawei device outputs ("dis current-configuration
    interface" plus "dis arp" captures, including "vrrp vrid ... virtual-ip"
    lines and an IPv6 address on Eth-Trunk5.123) to ip_drawer, dumps the
    yEd graphml to ./Output/ and compares it byte-for-byte against the
    stored reference file.

    ``create_yed_diagram`` and ``ip_drawer`` are provided elsewhere in
    this file (module under test).
    """
    # Device outputs keyed by platform name; hua_sw2 has no ARP capture.
    data = {"Huawei": ["""
<hua_sw1>dis current-configuration interface
#
interface Vlanif140
ip binding vpn-instance VRF_MGMT
ip address 10.1.1.2 255.255.255.0
vrrp vrid 200 virtual-ip 10.1.1.1
#
interface Eth-Trunk5.123
vlan-type dot1q 123
description hua_sw2 BGP peering
ip binding vpn-instance VRF_MGMT
ip address 10.0.0.1 255.255.255.252
ipv6 address FD00:1::1/126
#
interface Eth-Trunk5.200
vlan-type dot1q 200
description hua_sw3 OSPF peering
ip address 192.168.2.2 255.255.255.252
<hua_sw1>dis arp all
10.1.1.2 a008-6fc1-1101 I Vlanif140 VRF_MGMT
10.1.1.1 a008-6fc1-1102 0 D Vlanif140 VRF_MGMT
10.1.1.3 a008-6fc1-1103 10 D/200 Vlanif140 VRF_MGMT
10.1.1.9 a008-6fc1-1104 10 D/200 Vlanif140 VRF_MGMT
10.0.0.2 a008-6fc1-1105 10 D/200 Eth-Trunk5.123 VRF_MGMT
""",
"""
<hua_sw2>dis current-configuration interface
#
interface Vlanif140
ip binding vpn-instance VRF_MGMT
ip address 10.1.1.3 255.255.255.0
vrrp vrid 200 virtual-ip 10.1.1.1
#
interface Eth-Trunk5.123
vlan-type dot1q 123
description hua_sw1 BGP peering
ip binding vpn-instance VRF_MGMT
ip address 10.0.0.2 255.255.255.252
ipv6 address FD00:1::2/126
""",
"""
<hua_sw3>dis current-configuration interface
#
interface Vlanif200
ip binding vpn-instance VRF_CUST1
ip address 192.168.1.1 255.255.255.0
#
interface Eth-Trunk5.200
vlan-type dot1q 200
description hua_sw1 OSPF peering
ip address 192.168.2.1 255.255.255.252
<hua_sw3>dis arp
192.168.1.1 a008-6fc1-1111 I Vlanif200
192.168.1.10 a008-6fc1-1110 30 D/300 Vlanif200
"""]
}
    # Drawer options: include ARP-derived nodes and FHRP virtual IPs.
    config = {
        "add_arp": True,
        "add_fhrp": True
    }
    drawing = create_yed_diagram()
    drawer = ip_drawer(drawing, config)
    drawer.work(data)
    # Render the diagram, then diff the produced graphml against the
    # checked-in expected output.
    drawer.drawing.dump_file(filename="test_ip_drawing_yed_data_dict_huawei.graphml", folder="./Output/")
    with open ("./Output/test_ip_drawing_yed_data_dict_huawei.graphml") as produced:
        with open("./Output/should_be_test_ip_drawing_yed_data_dict_huawei.graphml") as should_be:
            assert produced.read() == should_be.read()
# test_ip_drawing_yed_data_dict_huawei()
def test_ip_drawing_yed_data_dict_fortigate():
    """Golden-file test: Fortigate data with ARP processing enabled.

    Feeds two simulated Fortigate device outputs ("get system config"
    interface sections, including a secondaryip block, plus one
    "get system arp" capture) to ip_drawer with ``add_arp`` on, dumps the
    yEd graphml to ./Output/ and compares it byte-for-byte against the
    stored reference file.

    ``create_yed_diagram`` and ``ip_drawer`` are provided elsewhere in
    this file (module under test).
    """
    # Device outputs keyed by platform name; forti-fw-02 has no ARP capture.
    data = {"Fortigate": ["""
forti-fw-01 (Corporate) # get system config
config system interface
edit "vms_vlan"
set vdom "root"
set ip 1.1.1.1 255.255.255.0
set allowaccess ping https ssh snmp
set description "VMs data vlan"
set alias "vms_vlan"
config secondaryip
edit 1
set ip 10.38.1.152 255.255.255.0
set allowaccess ping
next
end
next
edit "NMS_mgmt"
set vdom "root"
set ip 10.0.0.1 255.255.255.0
set allowaccess ping https ssh snmp
set description "NMS management access"
set alias "NMS_mgmt"
next
edit "uplink_1"
set vdom "root"
set ip 10.1.0.1 255.255.255.252
set description "bgp to upstream FW"
next
forti-fw-01 (Corporate) # get system arp
Address Age(min) Hardware Addr Interface
1.1.1.10 0 22:31:5e:00:34:d1 vms_vlan
10.0.0.10 0 22:31:5e:00:34:c2 NMS_mgmt
10.0.0.31 0 22:31:5e:00:34:31 NMS_mgmt
""",
"""
forti-fw-02 (Corporate) # get system config
config system interface
edit "fw_1"
set vdom "root"
set ip 10.1.0.2 255.255.255.252
set description "bgp to forti-fw-01"
next
"""]
}
    # Drawer options: include ARP-derived nodes only (no FHRP here).
    config = {
        "add_arp": True
    }
    drawing = create_yed_diagram()
    drawer = ip_drawer(drawing, config)
    drawer.work(data)
    # Render the diagram, then diff the produced graphml against the
    # checked-in expected output.
    drawer.drawing.dump_file(filename="test_ip_drawing_yed_data_dict_fortigate.graphml", folder="./Output/")
    with open ("./Output/test_ip_drawing_yed_data_dict_fortigate.graphml") as produced:
        with open("./Output/should_be_test_ip_drawing_yed_data_dict_fortigate.graphml") as should_be:
            assert produced.read() == should_be.read()
# test_ip_drawing_yed_data_dict_fortigate()
| 31.909091
| 127
| 0.717907
| 3,859
| 23,868
| 4.298264
| 0.063229
| 0.07741
| 0.06077
| 0.037379
| 0.937963
| 0.92259
| 0.913848
| 0.887743
| 0.862543
| 0.856273
| 0
| 0.188648
| 0.192475
| 23,868
| 748
| 128
| 31.909091
| 0.671941
| 0.013784
| 0
| 0.578261
| 0
| 0.002174
| 0.735452
| 0.157763
| 0
| 0
| 0
| 0
| 0.021739
| 1
| 0.021739
| false
| 0
| 0.008696
| 0
| 0.030435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9c411f03c3fdc9efb1372388495a05d2a80ddde
| 141,293
|
py
|
Python
|
opcua/server/standard_address_space/standard_address_space_part3.py
|
aixiwang/opcua2cloud
|
32e1e745e4939f8d4fd51892d9a51230ffdfc198
|
[
"Apache-2.0"
] | null | null | null |
opcua/server/standard_address_space/standard_address_space_part3.py
|
aixiwang/opcua2cloud
|
32e1e745e4939f8d4fd51892d9a51230ffdfc198
|
[
"Apache-2.0"
] | null | null | null |
opcua/server/standard_address_space/standard_address_space_part3.py
|
aixiwang/opcua2cloud
|
32e1e745e4939f8d4fd51892d9a51230ffdfc198
|
[
"Apache-2.0"
] | 2
|
2019-01-14T10:13:57.000Z
|
2020-02-11T15:22:14.000Z
|
# -*- coding: utf-8 -*-
"""
DO NOT EDIT THIS FILE!
It is automatically generated from opcfoundation.org schemas.
"""
from opcua import ua
from opcua.ua import NodeId, QualifiedName, NumericNodeId, StringNodeId, GuidNodeId
from opcua.ua import NodeClass, LocalizedText
def create_standard_address_space_Part3(server):
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3062, 0)
node.BrowseName = QualifiedName('Default Binary', 0)
node.NodeClass = NodeClass.Object
node.TypeDefinition = NumericNodeId(58, 0)
attrs = ua.ObjectAttributes()
attrs.Description = LocalizedText("The default binary encoding for a data type.")
attrs.DisplayName = LocalizedText("Default Binary")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3062, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(58, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3063, 0)
node.BrowseName = QualifiedName('Default XML', 0)
node.NodeClass = NodeClass.Object
node.TypeDefinition = NumericNodeId(58, 0)
attrs = ua.ObjectAttributes()
attrs.Description = LocalizedText("The default XML encoding for a data type.")
attrs.DisplayName = LocalizedText("Default XML")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3063, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(58, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(24, 0)
node.BrowseName = QualifiedName('BaseDataType', 0)
node.NodeClass = NodeClass.DataType
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that can have any valid DataType.")
attrs.DisplayName = LocalizedText("BaseDataType")
attrs.IsAbstract = True
node.NodeAttributes = attrs
server.add_nodes([node])
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(26, 0)
node.BrowseName = QualifiedName('Number', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(24, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that can have any numeric DataType.")
attrs.DisplayName = LocalizedText("Number")
attrs.IsAbstract = True
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(26, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(24, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(27, 0)
node.BrowseName = QualifiedName('Integer', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(26, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that can have any integer DataType.")
attrs.DisplayName = LocalizedText("Integer")
attrs.IsAbstract = True
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(27, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(26, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(28, 0)
node.BrowseName = QualifiedName('UInteger', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(26, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that can have any unsigned integer DataType.")
attrs.DisplayName = LocalizedText("UInteger")
attrs.IsAbstract = True
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(28, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(26, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(29, 0)
node.BrowseName = QualifiedName('Enumeration', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(24, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an enumerated DataType.")
attrs.DisplayName = LocalizedText("Enumeration")
attrs.IsAbstract = True
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(29, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(24, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(1, 0)
node.BrowseName = QualifiedName('Boolean', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(24, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is either TRUE or FALSE.")
attrs.DisplayName = LocalizedText("Boolean")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(1, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(24, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(2, 0)
node.BrowseName = QualifiedName('SByte', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(27, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an integer between -128 and 127.")
attrs.DisplayName = LocalizedText("SByte")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(2, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(27, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3, 0)
node.BrowseName = QualifiedName('Byte', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(28, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an integer between 0 and 255.")
attrs.DisplayName = LocalizedText("Byte")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(3, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(28, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(4, 0)
node.BrowseName = QualifiedName('Int16', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(27, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an integer between −32,768 and 32,767.")
attrs.DisplayName = LocalizedText("Int16")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(4, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(27, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(5, 0)
node.BrowseName = QualifiedName('UInt16', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(28, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an integer between 0 and 65535.")
attrs.DisplayName = LocalizedText("UInt16")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(5, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(28, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(6, 0)
node.BrowseName = QualifiedName('Int32', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(27, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an integer between −2,147,483,648 and 2,147,483,647.")
attrs.DisplayName = LocalizedText("Int32")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(6, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(27, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(7, 0)
node.BrowseName = QualifiedName('UInt32', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(28, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an integer between 0 and 4,294,967,295.")
attrs.DisplayName = LocalizedText("UInt32")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(7, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(28, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(8, 0)
node.BrowseName = QualifiedName('Int64', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(27, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an integer between −9,223,372,036,854,775,808 and 9,223,372,036,854,775,807.")
attrs.DisplayName = LocalizedText("Int64")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(8, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(27, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(9, 0)
node.BrowseName = QualifiedName('UInt64', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(28, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an integer between 0 and 18,446,744,073,709,551,615.")
attrs.DisplayName = LocalizedText("UInt64")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(9, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(28, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(10, 0)
node.BrowseName = QualifiedName('Float', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(26, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an IEEE 754-1985 single precision floating point number.")
attrs.DisplayName = LocalizedText("Float")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(10, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(26, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11, 0)
node.BrowseName = QualifiedName('Double', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(26, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an IEEE 754-1985 double precision floating point number.")
attrs.DisplayName = LocalizedText("Double")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(11, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(26, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(12, 0)
node.BrowseName = QualifiedName('String', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(24, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is a sequence of printable Unicode characters.")
attrs.DisplayName = LocalizedText("String")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(12, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(24, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(13, 0)
node.BrowseName = QualifiedName('DateTime', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(24, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is a Gregorian calender date and time.")
attrs.DisplayName = LocalizedText("DateTime")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(13, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(24, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(14, 0)
node.BrowseName = QualifiedName('Guid', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(24, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is a 128-bit globally unique identifier.")
attrs.DisplayName = LocalizedText("Guid")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(14, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(24, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15, 0)
node.BrowseName = QualifiedName('ByteString', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(24, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is a sequence of bytes.")
attrs.DisplayName = LocalizedText("ByteString")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(15, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(24, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(16, 0)
node.BrowseName = QualifiedName('XmlElement', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(24, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an XML element.")
attrs.DisplayName = LocalizedText("XmlElement")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(16, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(24, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(17, 0)
node.BrowseName = QualifiedName('NodeId', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(24, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an identifier for a node within a Server address space.")
attrs.DisplayName = LocalizedText("NodeId")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(17, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(24, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(20, 0)
node.BrowseName = QualifiedName('QualifiedName', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(24, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is a name qualified by a namespace.")
attrs.DisplayName = LocalizedText("QualifiedName")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(20, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(24, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(21, 0)
node.BrowseName = QualifiedName('LocalizedText', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(24, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is human readable Unicode text with a locale identifier.")
attrs.DisplayName = LocalizedText("LocalizedText")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(21, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(24, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(22, 0)
node.BrowseName = QualifiedName('Structure', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(24, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is any type of structure that can be described with a data encoding.")
attrs.DisplayName = LocalizedText("Structure")
attrs.IsAbstract = True
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(22, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(24, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(30, 0)
node.BrowseName = QualifiedName('Image', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(15, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that is an image encoded as a string of bytes.")
attrs.DisplayName = LocalizedText("Image")
attrs.IsAbstract = True
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(30, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(15, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(50, 0)
node.BrowseName = QualifiedName('Decimal', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(26, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes an arbitrary precision decimal value.")
attrs.DisplayName = LocalizedText("Decimal")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(50, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(26, 0)
refs.append(ref)
server.add_references(refs)
# Base ReferenceType hierarchy (ns=0). Each entry creates one ReferenceType
# node and, for every subtype, the inverse HasSubtype (i=45) reference back to
# its parent. Tuple layout:
#   (numeric id, browse/display name, parent id or None, description,
#    inverse name or None, IsAbstract, Symmetric)
# NOTE(review): generated from the OPC UA NodeSet — the doubled "to to" in the
# HasEncoding description is reproduced verbatim from the original text.
_ref_type_rows = [
    (31, 'References', None,
     "The abstract base type for all references.",
     None, True, True),
    (32, 'NonHierarchicalReferences', 31,
     "The abstract base type for all non-hierarchical references.",
     'NonHierarchicalReferences', True, False),
    (33, 'HierarchicalReferences', 31,
     "The abstract base type for all hierarchical references.",
     'HierarchicalReferences', True, False),
    (34, 'HasChild', 33,
     "The abstract base type for all non-looping hierarchical references.",
     'ChildOf', True, False),
    (35, 'Organizes', 33,
     "The type for hierarchical references that are used to organize nodes.",
     'OrganizedBy', False, False),
    (36, 'HasEventSource', 33,
     "The type for non-looping hierarchical references that are used to organize event sources.",
     'EventSourceOf', False, False),
    (37, 'HasModellingRule', 32,
     "The type for references from instance declarations to modelling rule nodes.",
     'ModellingRuleOf', False, False),
    (38, 'HasEncoding', 32,
     "The type for references from data type nodes to to data type encoding nodes.",
     'EncodingOf', False, False),
    (39, 'HasDescription', 32,
     "The type for references from data type encoding nodes to data type description nodes.",
     'DescriptionOf', False, False),
]
for _rt_id, _rt_name, _rt_parent, _rt_desc, _rt_inverse, _rt_abstract, _rt_symmetric in _ref_type_rows:
    node = ua.AddNodesItem()
    node.RequestedNewNodeId = NumericNodeId(_rt_id, 0)
    node.BrowseName = QualifiedName(_rt_name, 0)
    node.NodeClass = NodeClass.ReferenceType
    if _rt_parent is not None:
        node.ParentNodeId = NumericNodeId(_rt_parent, 0)
        node.ReferenceTypeId = NumericNodeId(45, 0)
    attrs = ua.ReferenceTypeAttributes()
    attrs.Description = LocalizedText(_rt_desc)
    attrs.DisplayName = LocalizedText(_rt_name)
    if _rt_inverse is not None:
        attrs.InverseName = LocalizedText(_rt_inverse)
    if _rt_abstract:
        attrs.IsAbstract = True
    if _rt_symmetric:
        attrs.Symmetric = True
    node.NodeAttributes = attrs
    server.add_nodes([node])
    if _rt_parent is not None:
        # Inverse HasSubtype reference back to the parent reference type.
        ref = ua.AddReferencesItem()
        ref.IsForward = False
        ref.ReferenceTypeId = NumericNodeId(45, 0)
        ref.SourceNodeId = NumericNodeId(_rt_id, 0)
        ref.TargetNodeClass = NodeClass.DataType
        ref.TargetNodeId = NumericNodeId(_rt_parent, 0)
        refs = [ref]
        server.add_references(refs)
# HasTypeDefinition (i=40): NonHierarchicalReferences subtype linking an
# instance node to its type definition node.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(40, 0)
node.BrowseName = QualifiedName('HasTypeDefinition', 0)
node.NodeClass = NodeClass.ReferenceType
node.ParentNodeId = NumericNodeId(32, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.ReferenceTypeAttributes()
# NOTE(review): fixed typos carried over from the NodeSet text
# ("from a instance node its type defintion node").
attrs.Description = LocalizedText("The type for references from an instance node to its type definition node.")
attrs.DisplayName = LocalizedText("HasTypeDefinition")
attrs.InverseName = LocalizedText("TypeDefinitionOf")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
# Inverse HasSubtype (i=45) reference back to NonHierarchicalReferences (i=32).
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(40, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(32, 0)
refs.append(ref)
server.add_references(refs)
# Further ReferenceType subtypes (ns=0). Tuple layout:
#   (numeric id, browse/display name, parent id, description,
#    inverse name, IsAbstract)
_subtype_rows = [
    (41, 'GeneratesEvent', 32,
     "The type for references from a node to an event type that is raised by node.",
     'GeneratedBy', False),
    (3065, 'AlwaysGeneratesEvent', 41,
     "The type for references from a node to an event type that is always raised by node.",
     'AlwaysGeneratedBy', False),
    (44, 'Aggregates', 34,
     "The type for non-looping hierarchical references that are used to aggregate nodes into complex types.",
     'AggregatedBy', True),
    (45, 'HasSubtype', 34,
     "The type for non-looping hierarchical references that are used to define sub types.",
     'SubtypeOf', False),
    (46, 'HasProperty', 44,
     "The type for non-looping hierarchical reference from a node to its property.",
     'PropertyOf', False),
    (47, 'HasComponent', 44,
     "The type for non-looping hierarchical reference from a node to its component.",
     'ComponentOf', False),
    (48, 'HasNotifier', 36,
     "The type for non-looping hierarchical references that are used to indicate how events propagate from node to node.",
     'NotifierOf', False),
    (49, 'HasOrderedComponent', 47,
     "The type for non-looping hierarchical reference from a node to its component when the order of references matters.",
     'OrderedComponentOf', False),
]
for _st_id, _st_name, _st_parent, _st_desc, _st_inverse, _st_abstract in _subtype_rows:
    node = ua.AddNodesItem()
    node.RequestedNewNodeId = NumericNodeId(_st_id, 0)
    node.BrowseName = QualifiedName(_st_name, 0)
    node.NodeClass = NodeClass.ReferenceType
    node.ParentNodeId = NumericNodeId(_st_parent, 0)
    node.ReferenceTypeId = NumericNodeId(45, 0)
    attrs = ua.ReferenceTypeAttributes()
    attrs.Description = LocalizedText(_st_desc)
    attrs.DisplayName = LocalizedText(_st_name)
    attrs.InverseName = LocalizedText(_st_inverse)
    if _st_abstract:
        attrs.IsAbstract = True
    node.NodeAttributes = attrs
    server.add_nodes([node])
    # Inverse HasSubtype (i=45) reference back to the parent reference type.
    ref = ua.AddReferencesItem()
    ref.IsForward = False
    ref.ReferenceTypeId = NumericNodeId(45, 0)
    ref.SourceNodeId = NumericNodeId(_st_id, 0)
    ref.TargetNodeClass = NodeClass.DataType
    ref.TargetNodeId = NumericNodeId(_st_parent, 0)
    refs = [ref]
    server.add_references(refs)
# NamingRuleType (i=120): enumeration DataType describing the significance of
# the BrowseName for an instance declaration; subtype of Enumeration (i=29).
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(120, 0)
node.BrowseName = QualifiedName('NamingRuleType', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(29, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("Describes a value that specifies the significance of the BrowseName for an instance declaration.")
attrs.DisplayName = LocalizedText("NamingRuleType")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
# Forward HasProperty (i=46) to the EnumValues property (i=12169), and
# inverse HasSubtype (i=45) back to Enumeration (i=29).
for _is_forward, _ref_type, _target in ((True, 46, 12169), (False, 45, 29)):
    ref = ua.AddReferencesItem()
    ref.IsForward = _is_forward
    ref.ReferenceTypeId = NumericNodeId(_ref_type, 0)
    ref.SourceNodeId = NumericNodeId(120, 0)
    ref.TargetNodeClass = NodeClass.DataType
    ref.TargetNodeId = NumericNodeId(_target, 0)
    refs.append(ref)
server.add_references(refs)
# EnumValues property (i=12169) of NamingRuleType: one EnumValueType entry per
# NamingRuleType member (Mandatory / Optional / Constraint).
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(12169, 0)
node.BrowseName = QualifiedName('EnumValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(120, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("EnumValues")
attrs.DataType = NumericNodeId(7594, 0)
value = []
for _enum_value, _label, _meaning in (
    (1, 'Mandatory',
     'The BrowseName must appear in all instances of the type.'),
    (2, 'Optional',
     'The BrowseName may appear in an instance of the type.'),
    (3, 'Constraint',
     'The modelling rule defines a constraint and the BrowseName is not used in an instance of the type.'),
):
    extobj = ua.EnumValueType()
    extobj.Value = _enum_value
    extobj.DisplayName.Text = _label
    extobj.Description.Text = _meaning
    value.append(extobj)
attrs.Value = ua.Variant(value, ua.VariantType.ExtensionObject)
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
# (IsForward, reference type, target): HasTypeDefinition -> PropertyType (i=68),
# HasModellingRule -> Mandatory (i=78), inverse HasProperty -> NamingRuleType (i=120).
for _is_forward, _ref_type, _target in ((True, 40, 68), (True, 37, 78), (False, 46, 120)):
    ref = ua.AddReferencesItem()
    ref.IsForward = _is_forward
    ref.ReferenceTypeId = NumericNodeId(_ref_type, 0)
    ref.SourceNodeId = NumericNodeId(12169, 0)
    ref.TargetNodeClass = NodeClass.DataType
    ref.TargetNodeId = NumericNodeId(_target, 0)
    refs.append(ref)
server.add_references(refs)
# Standard Property variables (TypeDefinition PropertyType, i=68), each
# followed by a forward HasTypeDefinition (i=40) reference to PropertyType.
# Tuple layout: (numeric id, browse/display name, description,
#                DataType NodeId, ValueRank); ValueRank -2 = any, 1 = 1-D array.
_property_rows = [
    (3068, 'NodeVersion',
     "The version number of the node (used to indicate changes to references of the owning node).",
     ua.NodeId(ua.ObjectIds.String), -2),
    (12170, 'ViewVersion',
     "The version number of the view.",
     ua.NodeId(ua.ObjectIds.UInt32), -2),
    (3067, 'Icon',
     "A small image representing the object.",
     NumericNodeId(30, 0), -2),
    (3069, 'LocalTime',
     "The local time where the owning variable value was collected.",
     NumericNodeId(8912, 0), -2),
    (3070, 'AllowNulls',
     "Whether the value of the owning variable is allowed to be null.",
     ua.NodeId(ua.ObjectIds.Boolean), -2),
    (11433, 'ValueAsText',
     "The string representation of the current value for a variable with an enumerated data type.",
     ua.NodeId(ua.ObjectIds.LocalizedText), -2),
    (11498, 'MaxStringLength',
     "The maximum number of bytes supported by the DataVariable.",
     ua.NodeId(ua.ObjectIds.UInt32), -2),
    (15002, 'MaxCharacters',
     "The maximum number of Unicode characters supported by the DataVariable.",
     ua.NodeId(ua.ObjectIds.UInt32), -2),
    (12908, 'MaxByteStringLength',
     "The maximum length for a byte string that can be stored in the owning variable.",
     ua.NodeId(ua.ObjectIds.UInt32), -2),
    (11512, 'MaxArrayLength',
     "The maximum length for an array that can be stored in the owning variable.",
     ua.NodeId(ua.ObjectIds.UInt32), -2),
    (11513, 'EngineeringUnits',
     "The engineering units for the value of the owning variable.",
     NumericNodeId(887, 0), -2),
    (11432, 'EnumStrings',
     "The human readable strings associated with the values of an enumerated value (when values are sequential).",
     ua.NodeId(ua.ObjectIds.LocalizedText), 1),
    (3071, 'EnumValues',
     "The human readable strings associated with the values of an enumerated value (when values have no sequence).",
     NumericNodeId(7594, 0), 1),
    (12745, 'OptionSetValues',
     "Contains the human-readable representation for each bit of the bit mask.",
     ua.NodeId(ua.ObjectIds.LocalizedText), 1),
    (3072, 'InputArguments',
     "The input arguments for a method.",
     NumericNodeId(296, 0), 1),
    (3073, 'OutputArguments',
     "The output arguments for a method.",
     NumericNodeId(296, 0), 1),
    (16306, 'DefaultInputValues',
     "Specifies the default values for optional input arguments.",
     ua.NodeId(ua.ObjectIds.String), 1),
]
for _prop_id, _prop_name, _prop_desc, _prop_datatype, _prop_rank in _property_rows:
    node = ua.AddNodesItem()
    node.RequestedNewNodeId = NumericNodeId(_prop_id, 0)
    node.BrowseName = QualifiedName(_prop_name, 0)
    node.NodeClass = NodeClass.Variable
    node.TypeDefinition = NumericNodeId(68, 0)
    attrs = ua.VariableAttributes()
    attrs.Description = LocalizedText(_prop_desc)
    attrs.DisplayName = LocalizedText(_prop_name)
    attrs.DataType = _prop_datatype
    attrs.ValueRank = _prop_rank
    node.NodeAttributes = attrs
    server.add_nodes([node])
    # Forward HasTypeDefinition reference to PropertyType (i=68).
    ref = ua.AddReferencesItem()
    ref.IsForward = True
    ref.ReferenceTypeId = NumericNodeId(40, 0)
    ref.SourceNodeId = NumericNodeId(_prop_id, 0)
    ref.TargetNodeClass = NodeClass.DataType
    ref.TargetNodeId = NumericNodeId(68, 0)
    refs = [ref]
    server.add_references(refs)
# Image format DataTypes (subtypes of Image, i=30), each followed by the
# inverse HasSubtype (i=45) reference back to Image.
for _img_id, _img_name, _img_desc in (
    (2000, 'ImageBMP', "An image encoded in BMP format."),
    (2001, 'ImageGIF', "An image encoded in GIF format."),
    (2002, 'ImageJPG', "An image encoded in JPEG format."),
    (2003, 'ImagePNG', "An image encoded in PNG format."),
):
    node = ua.AddNodesItem()
    node.RequestedNewNodeId = NumericNodeId(_img_id, 0)
    node.BrowseName = QualifiedName(_img_name, 0)
    node.NodeClass = NodeClass.DataType
    node.ParentNodeId = NumericNodeId(30, 0)
    node.ReferenceTypeId = NumericNodeId(45, 0)
    attrs = ua.DataTypeAttributes()
    attrs.Description = LocalizedText(_img_desc)
    attrs.DisplayName = LocalizedText(_img_name)
    node.NodeAttributes = attrs
    server.add_nodes([node])
    ref = ua.AddReferencesItem()
    ref.IsForward = False
    ref.ReferenceTypeId = NumericNodeId(45, 0)
    ref.SourceNodeId = NumericNodeId(_img_id, 0)
    ref.TargetNodeClass = NodeClass.DataType
    ref.TargetNodeId = NumericNodeId(30, 0)
    refs = [ref]
    server.add_references(refs)
# --- AudioDataType (i=16307): DataType under parent i=15.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(16307, 0)
node.BrowseName = QualifiedName('AudioDataType', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(15, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
# NOTE(review): this Description reads "An image encoded in PNG format." on
# an *audio* type -- it looks like a copy-paste from the ImagePNG unit.  It
# may faithfully mirror an error in the source NodeSet; confirm against the
# spec before changing, since this file is generated.
attrs.Description = LocalizedText("An image encoded in PNG format.")
attrs.DisplayName = LocalizedText("AudioDataType")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(16307, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(15, 0)
refs.append(ref)
server.add_references(refs)
# --- IdType (i=256): enumerated DataType under parent i=29.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(256, 0)
node.BrowseName = QualifiedName('IdType', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(29, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("The type of identifier used in a node id.")
attrs.DisplayName = LocalizedText("IdType")
node.NodeAttributes = attrs
server.add_nodes([node])
# Forward i=46 reference to the EnumStrings property (i=7591) plus the
# inverse link back to the parent.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(256, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(7591, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(256, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(29, 0)
refs.append(ref)
server.add_references(refs)
# --- EnumStrings (i=7591): Variable listing IdType's enum member names,
#     one LocalizedText per enum value (ValueRank 1 = one-dimensional array).
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(7591, 0)
node.BrowseName = QualifiedName('EnumStrings', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(256, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("EnumStrings")
attrs.DataType = ua.NodeId(ua.ObjectIds.LocalizedText)
attrs.Value = [LocalizedText('Numeric'),LocalizedText('String'),LocalizedText('Guid'),LocalizedText('Opaque')]
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
# Type-definition (i=40 -> i=68), modelling-rule (i=37 -> i=78) and inverse
# parent (i=46 -> i=256) references for the property.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(7591, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(7591, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(7591, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(256, 0)
refs.append(ref)
server.add_references(refs)
# --- NodeClass (i=257): mask-style enumerated DataType under parent i=29.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(257, 0)
node.BrowseName = QualifiedName('NodeClass', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(29, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("A mask specifying the class of the node.")
attrs.DisplayName = LocalizedText("NodeClass")
node.NodeAttributes = attrs
server.add_nodes([node])
# Forward i=46 reference to the EnumValues property (i=11878) plus the
# inverse link back to the parent.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(257, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11878, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(257, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(29, 0)
refs.append(ref)
server.add_references(refs)
# --- EnumValues (i=11878): Variable on NodeClass (i=257) holding one
#     EnumValueType entry per NodeClass bit flag.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11878, 0)
node.BrowseName = QualifiedName('EnumValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(257, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("EnumValues")
attrs.DataType = NumericNodeId(7594, 0)
# Build the EnumValueType entries from a (bit value, name, description)
# table rather than repeating the four assignment lines per entry.  The
# description texts are reproduced verbatim from the source data (including
# the "an variable type" wording).
value = []
for bit_value, display_text, description_text in (
    (0, 'Unspecified', 'No classes are selected.'),
    (1, 'Object', 'The node is an object.'),
    (2, 'Variable', 'The node is a variable.'),
    (4, 'Method', 'The node is a method.'),
    (8, 'ObjectType', 'The node is an object type.'),
    (16, 'VariableType', 'The node is an variable type.'),
    (32, 'ReferenceType', 'The node is a reference type.'),
    (64, 'DataType', 'The node is a data type.'),
    (128, 'View', 'The node is a view.'),
):
    extobj = ua.EnumValueType()
    extobj.Value = bit_value
    extobj.DisplayName.Text = display_text
    extobj.Description.Text = description_text
    value.append(extobj)
attrs.Value = ua.Variant(value, ua.VariantType.ExtensionObject)
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
# Type-definition, modelling-rule and inverse parent references, built from
# a (forward, reference type, target) table.
refs = []
for is_forward, ref_type_id, target_id in (
    (True, 40, 68),
    (True, 37, 78),
    (False, 46, 257),
):
    ref = ua.AddReferencesItem()
    ref.IsForward = is_forward
    ref.ReferenceTypeId = NumericNodeId(ref_type_id, 0)
    ref.SourceNodeId = NumericNodeId(11878, 0)
    ref.TargetNodeClass = NodeClass.DataType
    ref.TargetNodeId = NumericNodeId(target_id, 0)
    refs.append(ref)
server.add_references(refs)
# --- PermissionType (i=94): DataType under parent i=5.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(94, 0)
node.BrowseName = QualifiedName('PermissionType', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(5, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("PermissionType")
node.NodeAttributes = attrs
server.add_nodes([node])
# Forward i=46 reference to the OptionSetValues property (i=15030) plus the
# inverse link back to the parent.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(94, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(15030, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(94, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(5, 0)
refs.append(ref)
server.add_references(refs)
# --- OptionSetValues (i=15030): Variable naming PermissionType's bit flags,
#     one LocalizedText per bit position.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15030, 0)
node.BrowseName = QualifiedName('OptionSetValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(94, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("OptionSetValues")
attrs.DataType = ua.NodeId(ua.ObjectIds.LocalizedText)
attrs.Value = [LocalizedText('Browse'),LocalizedText('ReadRolePermissions'),LocalizedText('WriteAttribute'),LocalizedText('WriteRolePermissions'),LocalizedText('WriteHistorizing'),LocalizedText('Read'),LocalizedText('Write'),LocalizedText('ReadHistory'),LocalizedText('InsertHistory'),LocalizedText('ModifyHistory'),LocalizedText('DeleteHistory'),LocalizedText('ReceiveEvents'),LocalizedText('Call'),LocalizedText('AddReference'),LocalizedText('RemoveReference'),LocalizedText('DeleteNode'),LocalizedText('AddNode')]
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
# Type-definition (i=40 -> i=68), modelling-rule (i=37 -> i=78) and inverse
# parent references for the property.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15030, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(15030, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(15030, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(94, 0)
refs.append(ref)
server.add_references(refs)
# --- AccessLevelType (i=15031): DataType under parent i=3.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15031, 0)
node.BrowseName = QualifiedName('AccessLevelType', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(3, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("AccessLevelType")
node.NodeAttributes = attrs
server.add_nodes([node])
# Forward i=46 reference to the OptionSetValues property (i=15032) plus the
# inverse link back to the parent.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(15031, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(15032, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(15031, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3, 0)
refs.append(ref)
server.add_references(refs)
# --- OptionSetValues (i=15032): Variable naming AccessLevelType's bit flags.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15032, 0)
node.BrowseName = QualifiedName('OptionSetValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(15031, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("OptionSetValues")
attrs.DataType = ua.NodeId(ua.ObjectIds.LocalizedText)
attrs.Value = [LocalizedText('CurrentRead'),LocalizedText('CurrentWrite'),LocalizedText('HistoryRead'),LocalizedText('Reserved'),LocalizedText('HistoryWrite'),LocalizedText('StatusWrite'),LocalizedText('TimestampWrite')]
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
# Type-definition, modelling-rule and inverse parent references.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15032, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(15032, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(15032, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(15031, 0)
refs.append(ref)
server.add_references(refs)
# --- AccessLevelExType (i=15406): DataType under parent i=7.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15406, 0)
node.BrowseName = QualifiedName('AccessLevelExType', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(7, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("AccessLevelExType")
node.NodeAttributes = attrs
server.add_nodes([node])
# Forward i=46 reference to the OptionSetValues property (i=15407) plus the
# inverse link back to the parent.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(15406, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(15407, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(15406, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(7, 0)
refs.append(ref)
server.add_references(refs)
# --- OptionSetValues (i=15407): Variable naming AccessLevelExType's bit
#     flags; the run of 'Reserved' entries keeps the later flags at their
#     correct bit positions.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15407, 0)
node.BrowseName = QualifiedName('OptionSetValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(15406, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("OptionSetValues")
attrs.DataType = ua.NodeId(ua.ObjectIds.LocalizedText)
attrs.Value = [LocalizedText('CurrentRead'),LocalizedText('CurrentWrite'),LocalizedText('HistoryRead'),LocalizedText('Reserved'),LocalizedText('HistoryWrite'),LocalizedText('StatusWrite'),LocalizedText('TimestampWrite'),LocalizedText('Reserved'),LocalizedText('Reserved'),LocalizedText('Reserved'),LocalizedText('Reserved'),LocalizedText('Reserved'),LocalizedText('Reserved'),LocalizedText('Reserved'),LocalizedText('Reserved'),LocalizedText('Reserved'),LocalizedText('NonatomicRead'),LocalizedText('NonatomicWrite'),LocalizedText('WriteFullArrayOnly')]
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
# Type-definition, modelling-rule and inverse parent references.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15407, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(15407, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(15407, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(15406, 0)
refs.append(ref)
server.add_references(refs)
# --- EventNotifierType (i=15033): DataType under parent i=7.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15033, 0)
node.BrowseName = QualifiedName('EventNotifierType', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(7, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("EventNotifierType")
node.NodeAttributes = attrs
server.add_nodes([node])
# Forward i=46 reference to the OptionSetValues property (i=15034) plus the
# inverse link back to the parent.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(15033, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(15034, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(15033, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(7, 0)
refs.append(ref)
server.add_references(refs)
# --- OptionSetValues (i=15034): Variable naming EventNotifierType's bit
#     flags ('Reserved' keeps bit 1 unused).
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15034, 0)
node.BrowseName = QualifiedName('OptionSetValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(15033, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("OptionSetValues")
attrs.DataType = ua.NodeId(ua.ObjectIds.LocalizedText)
attrs.Value = [LocalizedText('SubscribeToEvents'),LocalizedText('Reserved'),LocalizedText('HistoryRead'),LocalizedText('HistoryWrite')]
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
# Type-definition, modelling-rule and inverse parent references.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15034, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(15034, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(15034, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(15033, 0)
refs.append(ref)
server.add_references(refs)
# --- AccessRestrictionType (i=95): DataType under parent i=7.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(95, 0)
node.BrowseName = QualifiedName('AccessRestrictionType', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(7, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("AccessRestrictionType")
node.NodeAttributes = attrs
server.add_nodes([node])
# Forward i=46 reference to the OptionSetValues property (i=15035) plus the
# inverse link back to the parent.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(95, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(15035, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(95, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(7, 0)
refs.append(ref)
server.add_references(refs)
# --- OptionSetValues (i=15035): Variable naming AccessRestrictionType's
#     bit flags.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15035, 0)
node.BrowseName = QualifiedName('OptionSetValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(95, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("OptionSetValues")
attrs.DataType = ua.NodeId(ua.ObjectIds.LocalizedText)
attrs.Value = [LocalizedText('SigningRequired'),LocalizedText('EncryptionRequired'),LocalizedText('SessionRequired')]
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
# Type-definition, modelling-rule and inverse parent references.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15035, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(15035, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(15035, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(95, 0)
refs.append(ref)
server.add_references(refs)
# --- RolePermissionType (i=96): DataType under parent i=22; registered with
#     a single inverse reference back to the parent.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(96, 0)
node.BrowseName = QualifiedName('RolePermissionType', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(22, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("RolePermissionType")
node.NodeAttributes = attrs
server.add_nodes([node])
parent_link = ua.AddReferencesItem()
parent_link.IsForward = False
parent_link.ReferenceTypeId = NumericNodeId(45, 0)
parent_link.SourceNodeId = NumericNodeId(96, 0)
parent_link.TargetNodeClass = NodeClass.DataType
parent_link.TargetNodeId = NumericNodeId(22, 0)
server.add_references([parent_link])
# --- DataTypeDefinition (i=97): abstract DataType under parent i=22.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(97, 0)
node.BrowseName = QualifiedName('DataTypeDefinition', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(22, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("DataTypeDefinition")
attrs.IsAbstract = True
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(97, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(22, 0)
refs.append(ref)
server.add_references(refs)
# --- StructureType (i=98): enumerated DataType under parent i=29.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(98, 0)
node.BrowseName = QualifiedName('StructureType', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(29, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("StructureType")
node.NodeAttributes = attrs
server.add_nodes([node])
# Forward i=46 reference to the EnumStrings property (i=14528) plus the
# inverse link back to the parent.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(98, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(14528, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(98, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(29, 0)
refs.append(ref)
server.add_references(refs)
# --- EnumStrings (i=14528): Variable listing StructureType's enum member
#     names.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(14528, 0)
node.BrowseName = QualifiedName('EnumStrings', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(98, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("EnumStrings")
attrs.DataType = ua.NodeId(ua.ObjectIds.LocalizedText)
attrs.Value = [LocalizedText('Structure'),LocalizedText('StructureWithOptionalFields'),LocalizedText('Union')]
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
# Type-definition, modelling-rule and inverse parent references.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(14528, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(14528, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(14528, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(98, 0)
refs.append(ref)
server.add_references(refs)
# --- StructureField (i=101): DataType under parent i=22.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(101, 0)
node.BrowseName = QualifiedName('StructureField', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(22, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("StructureField")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(101, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(22, 0)
refs.append(ref)
server.add_references(refs)
# --- StructureDefinition (i=99): DataType under parent i=97.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(99, 0)
node.BrowseName = QualifiedName('StructureDefinition', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(97, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("StructureDefinition")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(99, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(97, 0)
refs.append(ref)
server.add_references(refs)
# --- EnumDefinition (i=100): DataType under parent i=97.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(100, 0)
node.BrowseName = QualifiedName('EnumDefinition', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(97, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("EnumDefinition")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(100, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(97, 0)
refs.append(ref)
server.add_references(refs)
# --- Argument (i=296): DataType under parent i=22.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(296, 0)
node.BrowseName = QualifiedName('Argument', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(22, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("An argument for a method.")
attrs.DisplayName = LocalizedText("Argument")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(296, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(22, 0)
refs.append(ref)
server.add_references(refs)
# --- EnumValueType (i=7594): DataType under parent i=22.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(7594, 0)
node.BrowseName = QualifiedName('EnumValueType', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(22, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("A mapping between a value of an enumerated type and a name and description.")
attrs.DisplayName = LocalizedText("EnumValueType")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(7594, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(22, 0)
refs.append(ref)
server.add_references(refs)
# --- EnumField (i=102): DataType under parent i=7594.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(102, 0)
node.BrowseName = QualifiedName('EnumField', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(7594, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("EnumField")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(102, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(7594, 0)
refs.append(ref)
server.add_references(refs)
# --- OptionSet (i=12755): abstract-by-description DataType under parent
#     i=22 (note the attributes do not set IsAbstract here).
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(12755, 0)
node.BrowseName = QualifiedName('OptionSet', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(22, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("This abstract Structured DataType is the base DataType for all DataTypes representing a bit mask.")
attrs.DisplayName = LocalizedText("OptionSet")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(12755, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(22, 0)
refs.append(ref)
server.add_references(refs)
# --- Union (i=12756): DataType under parent i=22.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(12756, 0)
node.BrowseName = QualifiedName('Union', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(22, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("This abstract DataType is the base DataType for all union DataTypes.")
attrs.DisplayName = LocalizedText("Union")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(12756, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(22, 0)
refs.append(ref)
server.add_references(refs)
# --- NormalizedString (i=12877): DataType under parent i=12.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(12877, 0)
node.BrowseName = QualifiedName('NormalizedString', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(12, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("A string normalized based on the rules in the unicode specification.")
attrs.DisplayName = LocalizedText("NormalizedString")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(12877, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(12, 0)
refs.append(ref)
server.add_references(refs)
# --- DecimalString (i=12878): DataType under parent i=12.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(12878, 0)
node.BrowseName = QualifiedName('DecimalString', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(12, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
# Fixed typo in the user-facing description ("arbitraty" -> "arbitrary").
attrs.Description = LocalizedText("An arbitrary numeric value.")
attrs.DisplayName = LocalizedText("DecimalString")
node.NodeAttributes = attrs
server.add_nodes([node])
# Inverse reference linking the new node back to its parent.
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(12878, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(12, 0)
refs.append(ref)
server.add_references(refs)
# --- DurationString (i=12879): DataType under parent i=12.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(12879, 0)
node.BrowseName = QualifiedName('DurationString', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(12, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("A period of time formatted as defined in ISO 8601-2000.")
attrs.DisplayName = LocalizedText("DurationString")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(12879, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(12, 0)
refs.append(ref)
server.add_references(refs)
# --- TimeString (i=12880): DataType under parent i=12.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(12880, 0)
node.BrowseName = QualifiedName('TimeString', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(12, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("A time formatted as defined in ISO 8601-2000.")
attrs.DisplayName = LocalizedText("TimeString")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(12880, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(12, 0)
refs.append(ref)
server.add_references(refs)
# --- DateString (i=12881): DataType under parent i=12.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(12881, 0)
node.BrowseName = QualifiedName('DateString', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(12, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("A date formatted as defined in ISO 8601-2000.")
attrs.DisplayName = LocalizedText("DateString")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(12881, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(12, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(290, 0)
node.BrowseName = QualifiedName('Duration', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(11, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("A period of time measured in milliseconds.")
attrs.DisplayName = LocalizedText("Duration")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(290, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(294, 0)
node.BrowseName = QualifiedName('UtcTime', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(13, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("A date/time value specified in Universal Coordinated Time (UTC).")
attrs.DisplayName = LocalizedText("UtcTime")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(294, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(13, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(295, 0)
node.BrowseName = QualifiedName('LocaleId', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(12, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.Description = LocalizedText("An identifier for a user locale.")
attrs.DisplayName = LocalizedText("LocaleId")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(295, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(12, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(8912, 0)
node.BrowseName = QualifiedName('TimeZoneDataType', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(22, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("TimeZoneDataType")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(8912, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(22, 0)
refs.append(ref)
server.add_references(refs)
# Register the DataTypeEncoding Object nodes ("Default Binary",
# "Default XML", "Default JSON") for the data types above.  Each row is
# (encoding node id, data type id, browse name, description node id or
# None); all ids are in namespace 0.  For every row we add one Object node
# (parent = the data type, parent reference HasEncoding i=38, type
# definition DataTypeEncodingType i=76) and then its references:
#   - inverse HasEncoding (i=38) back to the data type,
#   - forward HasDescription (i=39) to the description node, when one
#     exists (the "Default JSON" encodings have none),
#   - forward HasTypeDefinition (i=40) to DataTypeEncodingType (i=76).
# NOTE: TargetNodeClass is always set to NodeClass.DataType here, even for
# the Object/ObjectType targets — this mirrors the original generated code
# and is preserved verbatim.
for _enc_id, _dtype_id, _browse, _descr_id in (
    (128, 96, 'Default Binary', 16131),
    (121, 97, 'Default Binary', 18178),
    (14844, 101, 'Default Binary', 18181),
    (122, 99, 'Default Binary', 18184),
    (123, 100, 'Default Binary', 18187),
    (298, 296, 'Default Binary', 7650),
    (8251, 7594, 'Default Binary', 7656),
    (14845, 102, 'Default Binary', 14870),
    (12765, 12755, 'Default Binary', 12767),
    (12766, 12756, 'Default Binary', 12770),
    (8917, 8912, 'Default Binary', 8914),
    (16126, 96, 'Default XML', 16127),
    (14797, 97, 'Default XML', 18166),
    (14800, 101, 'Default XML', 18169),
    (14798, 99, 'Default XML', 18172),
    (14799, 100, 'Default XML', 18175),
    (297, 296, 'Default XML', 8285),
    (7616, 7594, 'Default XML', 8291),
    (14801, 102, 'Default XML', 14826),
    (12757, 12755, 'Default XML', 12759),
    (12758, 12756, 'Default XML', 12762),
    (8913, 8912, 'Default XML', 8918),
    (15062, 96, 'Default JSON', None),
    (15063, 97, 'Default JSON', None),
    (15065, 101, 'Default JSON', None),
    (15066, 99, 'Default JSON', None),
):
    node = ua.AddNodesItem()
    node.RequestedNewNodeId = NumericNodeId(_enc_id, 0)
    node.BrowseName = QualifiedName(_browse, 0)
    node.NodeClass = NodeClass.Object
    node.ParentNodeId = NumericNodeId(_dtype_id, 0)
    node.ReferenceTypeId = NumericNodeId(38, 0)   # HasEncoding
    node.TypeDefinition = NumericNodeId(76, 0)    # DataTypeEncodingType
    attrs = ua.ObjectAttributes()
    attrs.DisplayName = LocalizedText(_browse)
    attrs.EventNotifier = 0
    node.NodeAttributes = attrs
    server.add_nodes([node])
    refs = []
    # Inverse HasEncoding: encoding object -> its data type.
    ref = ua.AddReferencesItem()
    ref.IsForward = False
    ref.ReferenceTypeId = NumericNodeId(38, 0)
    ref.SourceNodeId = NumericNodeId(_enc_id, 0)
    ref.TargetNodeClass = NodeClass.DataType
    ref.TargetNodeId = NumericNodeId(_dtype_id, 0)
    refs.append(ref)
    if _descr_id is not None:
        # Forward HasDescription: encoding object -> description variable.
        ref = ua.AddReferencesItem()
        ref.IsForward = True
        ref.ReferenceTypeId = NumericNodeId(39, 0)
        ref.SourceNodeId = NumericNodeId(_enc_id, 0)
        ref.TargetNodeClass = NodeClass.DataType
        ref.TargetNodeId = NumericNodeId(_descr_id, 0)
        refs.append(ref)
    # Forward HasTypeDefinition: encoding object -> DataTypeEncodingType.
    ref = ua.AddReferencesItem()
    ref.IsForward = True
    ref.ReferenceTypeId = NumericNodeId(40, 0)
    ref.SourceNodeId = NumericNodeId(_enc_id, 0)
    ref.TargetNodeClass = NodeClass.DataType
    ref.TargetNodeId = NumericNodeId(76, 0)
    refs.append(ref)
    server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15067, 0)
node.BrowseName = QualifiedName('Default JSON', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(100, 0)
node.ReferenceTypeId = NumericNodeId(38, 0)
node.TypeDefinition = NumericNodeId(76, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("Default JSON")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(38, 0)
ref.SourceNodeId = NumericNodeId(15067, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(100, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15067, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(76, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15081, 0)
node.BrowseName = QualifiedName('Default JSON', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(296, 0)
node.ReferenceTypeId = NumericNodeId(38, 0)
node.TypeDefinition = NumericNodeId(76, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("Default JSON")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(38, 0)
ref.SourceNodeId = NumericNodeId(15081, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(296, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15081, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(76, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15082, 0)
node.BrowseName = QualifiedName('Default JSON', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(7594, 0)
node.ReferenceTypeId = NumericNodeId(38, 0)
node.TypeDefinition = NumericNodeId(76, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("Default JSON")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(38, 0)
ref.SourceNodeId = NumericNodeId(15082, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(7594, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15082, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(76, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15083, 0)
node.BrowseName = QualifiedName('Default JSON', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(102, 0)
node.ReferenceTypeId = NumericNodeId(38, 0)
node.TypeDefinition = NumericNodeId(76, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("Default JSON")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(38, 0)
ref.SourceNodeId = NumericNodeId(15083, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(102, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15083, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(76, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15084, 0)
node.BrowseName = QualifiedName('Default JSON', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(12755, 0)
node.ReferenceTypeId = NumericNodeId(38, 0)
node.TypeDefinition = NumericNodeId(76, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("Default JSON")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(38, 0)
ref.SourceNodeId = NumericNodeId(15084, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(12755, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15084, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(76, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15085, 0)
node.BrowseName = QualifiedName('Default JSON', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(12756, 0)
node.ReferenceTypeId = NumericNodeId(38, 0)
node.TypeDefinition = NumericNodeId(76, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("Default JSON")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(38, 0)
ref.SourceNodeId = NumericNodeId(15085, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(12756, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15085, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(76, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15086, 0)
node.BrowseName = QualifiedName('Default JSON', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(8912, 0)
node.ReferenceTypeId = NumericNodeId(38, 0)
node.TypeDefinition = NumericNodeId(76, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("Default JSON")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(38, 0)
ref.SourceNodeId = NumericNodeId(15086, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(8912, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15086, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(76, 0)
refs.append(ref)
server.add_references(refs)
| 38.784793
| 557
| 0.709115
| 14,799
| 141,293
| 6.751537
| 0.039462
| 0.02367
| 0.048221
| 0.054796
| 0.888536
| 0.883202
| 0.879989
| 0.873784
| 0.863675
| 0.858161
| 0
| 0.039266
| 0.185466
| 141,293
| 3,642
| 558
| 38.795442
| 0.828879
| 0.000757
| 0
| 0.822788
| 1
| 0.001431
| 0.070486
| 0.002047
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000286
| false
| 0
| 0.000859
| 0
| 0.001145
| 0.000286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d9f2c4713f58011ac46063726e895ed2aedbda0d
| 280,319
|
py
|
Python
|
hphp/hack/test/integration/test_lsp.py
|
Raulkumar/hhvm
|
dfc1a50aff4d3a1df508c685fee401262e530879
|
[
"PHP-3.01",
"Zend-2.0"
] | null | null | null |
hphp/hack/test/integration/test_lsp.py
|
Raulkumar/hhvm
|
dfc1a50aff4d3a1df508c685fee401262e530879
|
[
"PHP-3.01",
"Zend-2.0"
] | null | null | null |
hphp/hack/test/integration/test_lsp.py
|
Raulkumar/hhvm
|
dfc1a50aff4d3a1df508c685fee401262e530879
|
[
"PHP-3.01",
"Zend-2.0"
] | 1
|
2021-01-29T08:44:22.000Z
|
2021-01-29T08:44:22.000Z
|
# pyre-strict
# flake8: noqa: B950
from __future__ import absolute_import, division, print_function, unicode_literals
import copy
import enum
import json
import os
import re
import unittest
import urllib.parse
from typing import Iterable, List, Mapping, Tuple
import common_tests
from hh_paths import hh_server
from lspcommand import LspCommandProcessor, Transcript
from lsptestspec import LspTestSpec, NoResponse, line
from test_case import TestCase
from utils import Json, JsonObject, interpolate_variables
class InsertTextFormat(enum.Enum):
    """LSP `insertTextFormat` values used in completion-item payloads below."""

    PlainText = 1
    Snippet = 2
class LspTestDriver(common_tests.CommonTestDriver):
    """Test driver that provisions an hh_server environment for LSP tests."""

    def write_load_config(self, use_saved_state: bool = False) -> None:
        """Write an hh.conf into the repo directory.

        Will use the .hhconfig already in the repo directory.
        As for hh.conf, we'll write it explicitly each test.
        Note that hh.conf uses lower-case booleans ("true"/"false").
        """
        use_saved_state_str = "true" if use_saved_state else "false"
        with open(os.path.join(self.repo_dir, "hh.conf"), "w") as f:
            f.write(
                """
use_watchman = true
watchman_subscribe_v2 = true
interrupt_on_watchman = true
interrupt_on_client = true
max_workers = 2
load_state_natively_v4 = {use_saved_state}
use_mini_state = {use_saved_state}
require_mini_state = {use_saved_state}
lazy_decl = {use_saved_state}
lazy_parse = {use_saved_state}
lazy_init2 = {use_saved_state}
symbolindex_search_provider = SqliteIndex
""".format(
                    use_saved_state=use_saved_state_str
                )
            )

    def write_naming_table_saved_state(self) -> str:
        """Run a one-shot `hh_server --check --save-naming` and return the
        path of the naming-table sqlite file it wrote.

        Asserts (with captured stdout/stderr in the message) if the check
        exits non-zero.
        """
        naming_table_saved_state_path = os.path.join(
            self.repo_dir, "naming_table_saved_state.sqlite"
        )
        (stdout, stderr, retcode) = self.proc_call(
            [
                hh_server,
                "--check",
                self.repo_dir,
                "--save-naming",
                naming_table_saved_state_path,
            ]
        )
        assert retcode == 0, (
            f"Failed to save naming table saved state: {retcode}\n"
            + f"STDOUT:\n{stdout}\n"
            + f"STDERR:\n{stderr}\n"
        )
        return naming_table_saved_state_path
class TestLsp(TestCase[LspTestDriver]):
@classmethod
def get_test_driver(cls) -> LspTestDriver:
return LspTestDriver()
@classmethod
def get_template_repo(cls) -> str:
return "hphp/hack/test/integration/data/lsp_exchanges/"
def repo_file(self, file: str) -> str:
return os.path.join(self.test_driver.repo_dir, file)
def read_repo_file(self, file: str) -> str:
with open(self.repo_file(file), "r") as f:
return f.read()
def repo_file_uri(self, file: str) -> str:
return urllib.parse.urljoin("file://", self.repo_file(file))
# pyre-fixme[11]: Annotation `Json` is not defined as a type.
def parse_test_data(self, file: str, variables: Mapping[str, str]) -> Json:
text = self.read_repo_file(file)
data: Json = json.loads(text)
data = interpolate_variables(data, variables)
return data
def load_test_data(
self, test_name: str, variables: Mapping[str, str]
) -> Tuple[Json, Json]:
test = self.parse_test_data(test_name + ".json", variables)
expected = self.parse_test_data(test_name + ".expected", variables)
return (test, expected)
def write_observed(self, test_name: str, observed_transcript: Json) -> None:
file = os.path.join(self.test_driver.template_repo, test_name + ".observed.log")
text = json.dumps(
list(self.get_important_received_items(observed_transcript)), indent=2
)
with open(file, "w") as f:
f.write(text)
# pyre-fixme[11]: Annotation `JsonObject` is not defined as a type.
def order_response(self, response: JsonObject) -> str:
if "id" in response:
return str(response["id"])
else:
return json.dumps(response, indent=2)
# sorts a list of responses using the 'id' parameter so they can be
# compared in sequence even if they came back from the server out of sequence.
# this can happen based on how json rpc is specified to work.
# if 'id' isn't present the response is a notification. we sort notifications
# by their entire text.
def sort_responses(self, responses: Iterable[JsonObject]) -> List[JsonObject]:
return sorted(responses, key=lambda response: self.order_response(response))
# removes stack traces from error responses since these can be noisy
# as code changes and they contain execution environment specific details
# by ignoring these when comparing responses we might miss some minor issues
# but will still catch the core error being thrown or not.
def sanitize_exceptions(
self, responses: Iterable[JsonObject]
) -> Iterable[JsonObject]:
sanitized = copy.deepcopy(responses)
for response in sanitized:
if "error" in response:
if "data" in response["error"]:
if "stack" in response["error"]["data"]:
del response["error"]["data"]["stack"]
if "current_stack" in response["error"]["data"]:
del response["error"]["data"]["current_stack"]
if "server_finale_stack" in response["error"]["data"]:
del response["error"]["data"]["server_finale_stack"]
return sanitized
# dumps an LSP response into a standard json format that can be used for
# doing precise text comparison in a way that is human readable in the case
# of there being an error.
def serialize_responses(self, responses: Iterable[Json]) -> List[str]:
return [json.dumps(response, indent=2) for response in responses]
# generates received responses from an LSP communication transcript
# ignoring the non-deterministic ones "progress" and "actionRequired"
def get_important_received_items(self, transcript: Transcript) -> Iterable[Json]:
for entry in transcript.values():
received = entry.received or None
if received is None:
continue
method = received.get("method") or ""
if method in [
"window/progress",
"window/actionRequired",
"window/showStatus",
"telemetry/event",
]:
continue
yield received
# gets a set of loaded responses ready for validation by sorting them
# by id and serializing them for precise text comparison
def prepare_responses(self, responses: Iterable[JsonObject]) -> List[str]:
return self.serialize_responses(
self.sanitize_exceptions(self.sort_responses(responses))
)
    def run_lsp_test(
        self,
        test_name: str,
        test: Json,
        expected: Json,
        wait_for_server: bool,
        use_serverless_ide: bool,
    ) -> None:
        """Drive the LSP with the `test` script and compare against `expected`.

        `wait_for_server` blocks until hh_server is ready before starting lsp;
        `use_serverless_ide` instead stops hh_server so serverless IDE answers.
        The two flags are mutually exclusive (asserted below).
        """
        if wait_for_server:
            assert not use_serverless_ide, (
                "Warning: both `wait_for_server` and `use_serverless_ide` "
                + "were set to `True` for testing in "
                + self.run_lsp_test.__name__
                + ". "
                + "While this is a possible test case, it hasn't been written yet, "
                + "so it's more likely that this is a mistake "
                + "and you're accidentally relying on hh_server to fulfill "
                + "serverless IDE requests."
                + "(If you're writing that test, "
                + "then it's time to remove this assertion.)"
            )
            # wait until hh_server is ready before starting lsp
            self.test_driver.run_check()
        elif use_serverless_ide:
            self.test_driver.stop_hh_server()
        # Exchange the scripted requests/responses with the language server.
        with LspCommandProcessor.create(
            self.test_driver.test_env, use_serverless_ide=use_serverless_ide
        ) as lsp:
            observed_transcript = lsp.communicate(test)
        # Persist what we observed for post-mortem debugging.
        self.write_observed(test_name, observed_transcript)
        expected_items = self.prepare_responses(expected)
        observed_items = self.prepare_responses(
            list(self.get_important_received_items(observed_transcript))
        )
        if not use_serverless_ide:
            # If the server's busy, maybe the machine's just under too much
            # pressure to give results in a timely fashion. Doing a retry would
            # only defer the question of what to do in that case, so instead
            # we'll just skip.
            self.throw_on_skip(observed_transcript)
        # validation checks that the number of items matches and that
        # the responses are exactly identical to what we expect
        self.assertEqual(
            len(expected_items),
            len(observed_items),
            "Wrong count. Observed this:\n"
            + json.dumps(observed_transcript, indent=2, separators=(",", ": ")),
        )
        for i in range(len(expected_items)):
            self.assertEqual(expected_items[i], observed_items[i])
def throw_on_skip(self, transcript: Transcript) -> None:
failure_messages = ["Server busy", "timed out"]
for entry in transcript.values():
received = entry.received
if received is None:
continue
if received.get("error"):
message = received["error"]["message"]
for failure_message in failure_messages:
if failure_message in message:
raise unittest.SkipTest(message)
def prepare_server_environment(self) -> None:
self.maxDiff = None
self.test_driver.write_load_config()
self.test_driver.start_hh_server()
(output, err, _) = self.test_driver.run_check()
if "Error: Ran out of retries" in err:
raise unittest.SkipTest("Hack server could not be launched")
self.assertEqual(output.strip(), "No errors!")
def prepare_serverless_ide_environment(self) -> Mapping[str, str]:
self.maxDiff = None
self.test_driver.write_load_config(use_saved_state=False)
naming_table_saved_state_path = (
self.test_driver.write_naming_table_saved_state()
)
return {"naming_table_saved_state_path": naming_table_saved_state_path}
def load_and_run(
self,
test_name: str,
variables: Mapping[str, str],
wait_for_server: bool = True,
use_serverless_ide: bool = False,
) -> None:
test, expected = self.load_test_data(test_name, variables)
self.run_lsp_test(
test_name=test_name,
test=test,
expected=expected,
wait_for_server=wait_for_server,
use_serverless_ide=use_serverless_ide,
)
def run_spec(
self,
spec: LspTestSpec,
variables: Mapping[str, str],
wait_for_server: bool,
use_serverless_ide: bool,
) -> None:
if wait_for_server:
# wait until hh_server is ready before starting lsp
self.test_driver.run_check()
elif use_serverless_ide:
self.test_driver.stop_hh_server()
with LspCommandProcessor.create(
self.test_driver.test_env, use_serverless_ide=use_serverless_ide
) as lsp_command_processor:
(observed_transcript, error_details) = spec.run(
lsp_command_processor=lsp_command_processor, variables=variables
)
file = os.path.join(self.test_driver.template_repo, spec.name + ".sent.log")
text = json.dumps(
[
sent
for sent, _received in observed_transcript.values()
if sent is not None
],
indent=2,
)
with open(file, "w") as f:
f.write(text)
file = os.path.join(self.test_driver.template_repo, spec.name + ".received.log")
text = json.dumps(
[
received
for _sent, received in observed_transcript.values()
if received is not None
],
indent=2,
)
with open(file, "w") as f:
f.write(text)
if not use_serverless_ide:
# If the server's busy, maybe the machine's just under too much
# pressure to give results in a timely fashion. Doing a retry would
# only defer the question of what to do in that case, so instead
# we'll just skip.
self.throw_on_skip(observed_transcript)
if error_details is not None:
raise AssertionError(error_details)
def setup_php_file(self, test_php: str) -> Mapping[str, str]:
# We want the path to the builtins directory. This is best we can do.
(output, err, retcode) = self.test_driver.run_check(
options=["--identify-function", "2:21", "--json"],
stdin="<?hh // partial\nfunction f():void {PHP_EOL;}\n",
)
if retcode == 7:
self.skipTest(
"Could not discover builtins directory -- "
+ "got exit code 7 (either Out_of_time or Out_of_retries). "
+ "The test machine is likely under too much load."
)
self.assertEqual(retcode, 0)
constants_path = json.loads(output)[0]["definition_pos"]["filename"]
return {
"hhi_path": re.sub("/constants.hhi$", "", constants_path),
"root_path": self.test_driver.repo_dir,
"php_file_uri": self.repo_file_uri(test_php),
"php_file": self.read_repo_file(test_php),
}
def test_init_shutdown(self) -> None:
self.prepare_server_environment()
self.load_and_run(
"initialize_shutdown", {"root_path": self.test_driver.repo_dir}
)
def test_serverless_ide_completion(self) -> None:
variables = dict(self.prepare_serverless_ide_environment())
variables.update(self.setup_php_file("completion.php"))
self.test_driver.stop_hh_server()
spec = (
self.initialize_spec(LspTestSpec("ide_completion"), use_serverless_ide=True)
.notification(
method="textDocument/didOpen",
params={
"textDocument": {
"uri": "${php_file_uri}",
"languageId": "hack",
"version": 1,
"text": "${php_file}",
}
},
)
.notification(
comment="Add '$x = $point1['' to test autocomplete for shapes",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 22, "character": 0},
"end": {"line": 22, "character": 0},
},
"text": "$x = $point1['",
}
],
},
)
.request(
line=line(),
comment="autocomplete after user types a shape",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 22, "character": 14},
},
result={
"isIncomplete": False,
"items": [
{
"label": "'x'",
"kind": 12,
"detail": "literal",
"inlineDetail": "literal",
"sortText": "'x'",
"insertText": "'x'",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "'x'",
"filename": "${root_path}/completion.php",
"line": 22,
"char": 19,
},
},
{
"label": "'y'",
"kind": 12,
"detail": "literal",
"inlineDetail": "literal",
"sortText": "'y'",
"insertText": "'y'",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "'y'",
"filename": "${root_path}/completion.php",
"line": 22,
"char": 30,
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add automatically closed apostrophes when typing a shape key, the way visual studio code does it",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 22, "character": 0},
"end": {"line": 22, "character": 14},
},
"text": "$x = $point1['']",
}
],
},
)
.request(
line=line(),
comment="autocomplete after a shape, with VS Code automatically closed apostrophes",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 22, "character": 14},
},
result={
"isIncomplete": False,
"items": [
{
"label": "'x",
"kind": 12,
"detail": "literal",
"inlineDetail": "literal",
"sortText": "'x",
"insertText": "'x",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "'x'",
"filename": "${root_path}/completion.php",
"line": 22,
"char": 19,
},
},
{
"label": "'y",
"kind": 12,
"detail": "literal",
"inlineDetail": "literal",
"sortText": "'y",
"insertText": "'y",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "'y'",
"filename": "${root_path}/completion.php",
"line": 22,
"char": 30,
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 0},
},
"text": "$x = <",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 6},
},
result={
"isIncomplete": False,
"items": [
{
"label": "ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:alpha",
"insertText": "ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
{
"label": "ab:cd:text",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:text",
"insertText": "ab:cd:text",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:text"},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <a'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 6},
},
"text": "$x = <a",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <a'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 7},
},
result={
"isIncomplete": False,
"items": [
{
"label": "ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:alpha",
"insertText": "ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
{
"label": "ab:cd:text",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:text",
"insertText": "ab:cd:text",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:text"},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <ab:'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 7},
},
"text": "$x = <ab:",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <ab:'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 9},
},
result={
"isIncomplete": False,
"items": [
{
"label": "ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:alpha",
"insertText": "ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
{
"label": "ab:cd:text",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:text",
"insertText": "ab:cd:text",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:text"},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <ab:cd:text '",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 9},
},
"text": "$x = <ab:cd:text ",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <ab:cd:text '",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 17},
},
result={
"isIncomplete": False,
"items": [
{
"label": "width",
"kind": 10,
"detail": "?int",
"inlineDetail": "?int",
"sortText": "width",
"insertText": "width",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":width",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 27,
"base_class": "\\:ab:cd:text",
},
},
{
"label": "color",
"kind": 10,
"detail": "?string",
"inlineDetail": "?string",
"sortText": "color",
"insertText": "color",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":color",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 13,
"base_class": "\\:ab:cd:text",
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <ab:cd:text w'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 17},
},
"text": "$x = <ab:cd:text w",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <ab:cd:text w'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 18},
},
result={
"isIncomplete": False,
"items": [
{
"label": "width",
"kind": 10,
"detail": "?int",
"inlineDetail": "?int",
"sortText": "width",
"insertText": "width",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":width",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 27,
"base_class": "\\:ab:cd:text",
},
},
{
"label": "color",
"kind": 10,
"detail": "?string",
"inlineDetail": "?string",
"sortText": "color",
"insertText": "color",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":color",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 13,
"base_class": "\\:ab:cd:text",
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = new :'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 18},
},
"text": "$x = new :",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = new :'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 10},
},
result={
"isIncomplete": False,
"items": [
{
"label": ":ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": ":ab:cd:alpha",
"insertText": ":ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
{
"label": ":ab:cd:text",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": ":ab:cd:text",
"insertText": ":ab:cd:text",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:text"},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = new :a'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 10},
},
"text": "$x = new :a",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = new :a'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 11},
},
result={
"isIncomplete": False,
"items": [
{
"label": ":ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": ":ab:cd:alpha",
"insertText": ":ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
{
"label": ":ab:cd:text",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": ":ab:cd:text",
"insertText": ":ab:cd:text",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:text"},
},
],
},
powered_by="serverless_ide",
)
# Note that this request should match the result in the previous example
.request(
line=line(),
comment="autocomplete resolving after '$x = new :a'",
method="completionItem/resolve",
params={
"label": ":ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"itemType": ":ab:cd:alpha",
"insertText": ":ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
result={
"label": ":ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"itemType": ":ab:cd:alpha",
"documentation": {
"kind": "markdown",
"value": ":ab:cd:alpha docblock",
},
"insertText": ":ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
powered_by="serverless_ide",
)
# Try the same thing again, but this time without "new", instead using "<xhp" style
.notification(
comment="Add '$x = <a'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 11},
},
"text": "$x = <a",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <a'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 7},
},
result={
"isIncomplete": False,
"items": [
{
"label": "ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:alpha",
"insertText": "ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
{
"label": "ab:cd:text",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:text",
"insertText": "ab:cd:text",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:text"},
},
],
},
powered_by="serverless_ide",
)
.request(
line=line(),
comment="autocomplete resolving after '$x = <a'",
method="completionItem/resolve",
params={
"label": "ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"insertText": "ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
result={
"label": "ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"documentation": {
"kind": "markdown",
"value": ":ab:cd:alpha docblock",
},
"insertText": "ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <ab:cd:text/>; $y = $x->'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 7},
},
"text": "$x = <ab:cd:text/>; $y = $x->",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <ab:cd:text/>; $y = $x->'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 29},
},
result={
"isIncomplete": False,
"items": [
{
"label": ":width",
"kind": 10,
"detail": "?int",
"inlineDetail": "?int",
"sortText": ":width",
"insertText": ":width",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":width",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 27,
"base_class": "\\:ab:cd:text",
},
},
{
"label": ":color",
"kind": 10,
"detail": "?string",
"inlineDetail": "?string",
"sortText": ":color",
"insertText": ":color",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":color",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 13,
"base_class": "\\:ab:cd:text",
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <ab:cd:text/>; $y = $x->:'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 29},
},
"text": "$x = <ab:cd:text/>; $y = $x->:",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <ab:cd:text/>; $y = $x->:'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 30},
},
result={
"isIncomplete": False,
"items": [
{
"label": ":width",
"kind": 10,
"detail": "?int",
"inlineDetail": "?int",
"sortText": ":width",
"insertText": ":width",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":width",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 27,
"base_class": "\\:ab:cd:text",
},
},
{
"label": ":color",
"kind": 10,
"detail": "?string",
"inlineDetail": "?string",
"sortText": ":color",
"insertText": ":color",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":color",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 13,
"base_class": "\\:ab:cd:text",
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add 'test_fun'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 30},
},
"text": "test_fun",
}
],
},
)
.request(
line=line(),
comment="autocomplete after 'test_fun'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 8},
},
result={
"isIncomplete": False,
"items": [
{
"label": "test_function",
"kind": 3,
"detail": "function",
"inlineDetail": "function",
"sortText": "test_function",
"insertText": "test_function",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": "test_function"},
}
],
},
powered_by="serverless_ide",
)
.request(
line=line(),
comment="autocomplete resolving after 'test_fun'",
method="completionItem/resolve",
params={
"label": "test_function",
"kind": 3,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"insertText": "test_function",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"filename": "${root_path}/completion.php",
"line": 8,
"char": 10,
},
},
result={
"label": "test_function",
"kind": 3,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"documentation": {
"kind": "markdown",
"value": "test_function docblock.",
},
"insertText": "test_function",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"filename": "${root_path}/completion.php",
"line": 8,
"char": 10,
},
},
powered_by="serverless_ide",
)
.notification(
comment="Add 'switch (Elsa::Alonso) { case Elsa:'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 8},
},
"text": "switch (Elsa::Alonso) { case Elsa:",
}
],
},
)
.request(
line=line(),
comment="autocomplete after 'switch (Elsa::Alonso) { case Elsa:'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 34},
},
result={"isIncomplete": False, "items": []},
powered_by="serverless_ide",
)
.notification(
comment="Add 'switch (Elsa::Alonso) { case Elsa::'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 34},
},
"text": "switch (Elsa::Alonso) { case Elsa::",
}
],
},
)
.request(
line=line(),
comment="autocomplete after 'switch (Elsa::Alonso) { case Elsa::'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 35},
},
result={
"isIncomplete": False,
"items": [
{
"label": "class",
"kind": 21,
"detail": "classname<this>",
"inlineDetail": "classname<this>",
"sortText": "class",
"insertText": "class",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "class",
"filename": "${root_path}/completion_extras.php",
"line": 13,
"char": 6,
"base_class": "\\Elsa",
},
},
{
"label": "Bard",
"kind": 21,
"detail": "Elsa",
"inlineDetail": "Elsa",
"sortText": "Bard",
"insertText": "Bard",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "Bard",
"filename": "${root_path}/completion_extras.php",
"line": 13,
"char": 12,
"base_class": "\\Elsa",
},
},
{
"label": "Alonso",
"kind": 21,
"detail": "Elsa",
"inlineDetail": "Elsa",
"sortText": "Alonso",
"insertText": "Alonso",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "Alonso",
"filename": "${root_path}/completion_extras.php",
"line": 13,
"char": 12,
"base_class": "\\Elsa",
},
},
{
"label": "isValid",
"kind": 2,
"detail": "function(mixed $value): bool",
"inlineDetail": "(mixed $value)",
"itemType": "bool",
"sortText": "isValid",
"insertText": "isValid(${1:\\$value})",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "isValid",
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 49,
"char": 32,
"base_class": "\\Elsa",
},
},
{
"label": "getValues",
"kind": 2,
"detail": "function(): darray<string, Elsa>",
"inlineDetail": "()",
"itemType": "darray<string, Elsa>",
"sortText": "getValues",
"insertText": "getValues()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "getValues",
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 34,
"char": 32,
"base_class": "\\Elsa",
},
},
{
"label": "getNames",
"kind": 2,
"detail": "function(): darray<Elsa, string>",
"inlineDetail": "()",
"itemType": "darray<Elsa, string>",
"sortText": "getNames",
"insertText": "getNames()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "getNames",
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 43,
"char": 32,
"base_class": "\\Elsa",
},
},
{
"label": "coerce",
"kind": 2,
"detail": "function(mixed $value): ?Elsa",
"inlineDetail": "(mixed $value)",
"itemType": "?Elsa",
"sortText": "coerce",
"insertText": "coerce(${1:\\$value})",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "coerce",
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 56,
"char": 32,
"base_class": "\\Elsa",
},
},
{
"label": "assertAll",
"kind": 2,
"detail": "function(Traversable<mixed> $values): Container<Elsa>",
"inlineDetail": "(Traversable<mixed> $values)",
"itemType": "Container<Elsa>",
"sortText": "assertAll",
"insertText": "assertAll(${1:\\$values})",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "assertAll",
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 70,
"char": 32,
"base_class": "\\Elsa",
},
},
{
"label": "assert",
"kind": 2,
"detail": "function(mixed $value): Elsa",
"inlineDetail": "(mixed $value)",
"itemType": "Elsa",
"sortText": "assert",
"insertText": "assert(${1:\\$value})",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "assert",
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 63,
"char": 32,
"base_class": "\\Elsa",
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add 'switch (Elsa::Alonso) { case Elsa::Alonso:'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 35},
},
"text": "switch (Elsa::Alonso) { case Elsa::Alonso:",
}
],
},
)
.request(
line=line(),
comment="docblock resolve after 'switch (Elsa::Alonso) { case Elsa::'",
method="completionItem/resolve",
params={
"label": "isValid",
"kind": 2,
"detail": "function(mixed $value): bool",
"inlineDetail": "(mixed $value)",
"itemType": "bool",
"insertTextFormat": InsertTextFormat.PlainText.value,
"textEdit": {
"range": {
"start": {"line": 3, "character": 35},
"end": {"line": 3, "character": 35},
},
"newText": "isValid",
},
"data": {
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 49,
"char": 32,
},
},
result={
"label": "isValid",
"kind": 2,
"detail": "function(mixed $value): bool",
"inlineDetail": "(mixed $value)",
"itemType": "bool",
"documentation": {
"kind": "markdown",
"value": "Returns whether or not the value is defined as a constant.",
},
"insertTextFormat": InsertTextFormat.PlainText.value,
"textEdit": {
"range": {
"start": {"line": 3, "character": 35},
"end": {"line": 3, "character": 35},
},
"newText": "isValid",
},
"data": {
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 49,
"char": 32,
},
},
powered_by="serverless_ide",
)
.request(
line=line(),
comment="autocomplete after 'switch (Elsa::Alonso) { case Elsa::Alonso:'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 42},
},
result={"isIncomplete": False, "items": []},
powered_by="serverless_ide",
)
.notification(
comment="Add 'TestNS\\'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 42},
},
"text": "TestNS\\",
}
],
},
)
.request(
line=line(),
comment="autocomplete after 'TestNS\\'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 7},
},
result={
"isIncomplete": False,
"items": [
{
"label": "test_func",
"kind": 3,
"detail": "function",
"inlineDetail": "function",
"sortText": "test_func",
"insertText": "test_func",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": "TestNS\\test_func"},
}
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$cc = new CompletionClass(); $cc->interfa'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 7},
},
"text": "$cc = new CompletionClass(); $cc->interfa",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$cc = new CompletionClass(); $cc->interfa'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 41},
},
result={
"isIncomplete": False,
"items": [
{
"label": "interfaceDocBlockMethod",
"kind": 2,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"sortText": "interfaceDocBlockMethod",
"insertText": "interfaceDocBlockMethod()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "interfaceDocBlockMethod",
"filename": "${root_path}/completion.php",
"line": 18,
"char": 19,
"base_class": "\\CompletionClass",
},
}
],
},
powered_by="serverless_ide",
)
.request(
line=line(),
comment="autocomplete resolving after '$cc = new CompletionClass(); $cc->interfa'",
method="completionItem/resolve",
params={
"label": "interfaceDocBlockMethod",
"kind": 2,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"insertTextFormat": InsertTextFormat.PlainText.value,
"textEdit": {
"range": {
"start": {"line": 3, "character": 34},
"end": {"line": 3, "character": 41},
},
"newText": "interfaceDocBlockMethod",
},
"data": {
"filename": "${root_path}/completion.php",
"line": 18,
"char": 19,
},
},
result={
"label": "interfaceDocBlockMethod",
"kind": 2,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"insertTextFormat": InsertTextFormat.PlainText.value,
"textEdit": {
"range": {
"start": {"line": 3, "character": 34},
"end": {"line": 3, "character": 41},
},
"newText": "interfaceDocBlockMethod",
},
"data": {
"filename": "${root_path}/completion.php",
"line": 18,
"char": 19,
},
},
powered_by="serverless_ide",
)
.notification(
comment="Add 'DeprecatedClass::'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 41},
},
"text": "DeprecatedClass::",
}
],
},
)
.request(
line=line(),
comment="autocomplete after 'DeprecatedClass::'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 17},
},
result={
"isIncomplete": False,
"items": [
{
"label": "class",
"kind": 21,
"detail": "classname<this>",
"inlineDetail": "classname<this>",
"sortText": "class",
"insertText": "class",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "class",
"filename": "${root_path}/completion_extras.php",
"line": 18,
"char": 13,
"base_class": "\\DeprecatedClass",
},
},
{
"label": "test_do_not_use",
"kind": 2,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"sortText": "~test_do_not_use",
"insertText": "test_do_not_use()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "test_do_not_use",
"filename": "${root_path}/completion_extras.php",
"line": 22,
"char": 26,
"base_class": "\\DeprecatedClass",
},
},
{
"label": "getName",
"kind": 2,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"sortText": "getName",
"insertText": "getName()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "getName",
"filename": "${root_path}/completion_extras.php",
"line": 19,
"char": 26,
"base_class": "\\DeprecatedClass",
},
},
{
"label": "getAttributes_DO_NOT_USE",
"kind": 2,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"sortText": "~getAttributes_DO_NOT_USE",
"insertText": "getAttributes_DO_NOT_USE()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "getAttributes_DO_NOT_USE",
"filename": "${root_path}/completion_extras.php",
"line": 21,
"char": 26,
"base_class": "\\DeprecatedClass",
},
},
{
"label": "__getLoader",
"kind": 2,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"sortText": "~__getLoader",
"insertText": "__getLoader()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "__getLoader",
"filename": "${root_path}/completion_extras.php",
"line": 20,
"char": 26,
"base_class": "\\DeprecatedClass",
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add 'call_lambda(3, $m'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 30, "character": 0},
"end": {"line": 30, "character": 0},
},
"text": " call_lambda(3, $m",
}
],
},
)
.request(
line=line(),
comment="autocomplete results for 'call_lambda(3, $m'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 30, "character": 19},
},
result={
"isIncomplete": False,
"items": [
{
"label": "$mylambda",
"kind": 6,
"detail": "local variable",
"inlineDetail": "(num $n)",
"itemType": "int",
"sortText": "$mylambda",
"insertText": "$mylambda",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "$mylambda",
"filename": "${root_path}/completion.php",
"line": 30,
"char": 15,
},
}
],
},
powered_by="serverless_ide",
)
.request(
line=line(),
comment="resolve autocompletion for $mylambda'",
method="completionItem/resolve",
params={
"label": "$mylambda",
"kind": 6,
"detail": "local variable",
"inlineDetail": "(num $n)",
"itemType": "int",
"insertTextFormat": InsertTextFormat.PlainText.value,
"textEdit": {
"range": {
"start": {"line": 30, "character": 17},
"end": {"line": 30, "character": 19},
},
"newText": "$mylambda",
},
"data": {
"filename": "${root_path}/completion.php",
"line": 30,
"char": 15,
},
},
result={
"label": "$mylambda",
"kind": 6,
"detail": "local variable",
"inlineDetail": "(num $n)",
"itemType": "int",
"insertTextFormat": InsertTextFormat.PlainText.value,
"textEdit": {
"range": {
"start": {"line": 30, "character": 17},
"end": {"line": 30, "character": 19},
},
"newText": "$mylambda",
},
"data": {
"filename": "${root_path}/completion.php",
"line": 30,
"char": 15,
},
},
powered_by="serverless_ide",
)
.request(line=line(), method="shutdown", params={}, result=None)
.notification(method="exit", params={})
)
self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
def test_serverless_ide_completion_legacy(self) -> None:
variables = dict(self.prepare_serverless_ide_environment())
variables.update(self.setup_php_file("completion.php"))
self.test_driver.stop_hh_server()
spec = (
self.initialize_spec(
LspTestSpec("serverless_ide_completion_legacy"), use_serverless_ide=True
)
.notification(
method="textDocument/didOpen",
params={
"textDocument": {
"uri": "${php_file_uri}",
"languageId": "hack",
"version": 1,
"text": "${php_file}",
}
},
)
.notification(
comment="Add '$x = <'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 0},
},
"text": "$x = <",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 6},
},
result={
"isIncomplete": False,
"items": [
{
"label": "ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:alpha",
"insertText": "ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
{
"label": "ab:cd:text",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:text",
"insertText": "ab:cd:text",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:text"},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <a'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 6},
},
"text": "$x = <a",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <a'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 7},
},
result={
"isIncomplete": False,
"items": [
{
"label": "ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:alpha",
"insertText": "ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
{
"label": "ab:cd:text",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:text",
"insertText": "ab:cd:text",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:text"},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <ab:'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 7},
},
"text": "$x = <ab:",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <ab:'.",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 9},
},
result={
"isIncomplete": False,
"items": [
{
"label": "ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:alpha",
"insertText": "ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
{
"label": "ab:cd:text",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": "ab:cd:text",
"insertText": "ab:cd:text",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:text"},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <ab:cd:text '",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 9},
},
"text": "$x = <ab:cd:text ",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <ab:cd:text '",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 17},
},
result={
"isIncomplete": False,
"items": [
{
"label": "width",
"kind": 10,
"detail": "?int",
"inlineDetail": "?int",
"sortText": "width",
"insertText": "width",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":width",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 27,
"base_class": "\\:ab:cd:text",
},
},
{
"label": "color",
"kind": 10,
"detail": "?string",
"inlineDetail": "?string",
"sortText": "color",
"insertText": "color",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":color",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 13,
"base_class": "\\:ab:cd:text",
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <ab:cd:text w'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 17},
},
"text": "$x = <ab:cd:text w",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <ab:cd:text w'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 18},
},
result={
"isIncomplete": False,
"items": [
{
"label": "width",
"kind": 10,
"detail": "?int",
"inlineDetail": "?int",
"sortText": "width",
"insertText": "width",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":width",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 27,
"base_class": "\\:ab:cd:text",
},
},
{
"label": "color",
"kind": 10,
"detail": "?string",
"inlineDetail": "?string",
"sortText": "color",
"insertText": "color",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":color",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 13,
"base_class": "\\:ab:cd:text",
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = new :''",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 18},
},
"text": "$x = new :",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = new :'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 10},
},
result={
"isIncomplete": False,
"items": [
{
"label": ":ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": ":ab:cd:alpha",
"insertText": ":ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
{
"label": ":ab:cd:text",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": ":ab:cd:text",
"insertText": ":ab:cd:text",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:text"},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = new :a'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 10},
},
"text": "$x = new :a",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = new :a'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 11},
},
result={
"isIncomplete": False,
"items": [
{
"label": ":ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": ":ab:cd:alpha",
"insertText": ":ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
{
"label": ":ab:cd:text",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"sortText": ":ab:cd:text",
"insertText": ":ab:cd:text",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:text"},
},
],
},
powered_by="serverless_ide",
)
# Note that this request sent should match the result given in the previous example
.request(
line=line(),
comment="autocomplete resolving after '$x = new :a'",
method="completionItem/resolve",
params={
"label": ":ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"itemType": ":ab:cd:alpha",
"insertText": ":ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
result={
"label": ":ab:cd:alpha",
"kind": 7,
"detail": "class",
"inlineDetail": "class",
"itemType": ":ab:cd:alpha",
"documentation": {
"kind": "markdown",
"value": ":ab:cd:alpha docblock",
},
"insertText": ":ab:cd:alpha",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": ":ab:cd:alpha"},
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <ab:cd:text/>; $y = $x->'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 11},
},
"text": "$x = <ab:cd:text/>; $y = $x->",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <ab:cd:text/>; $y = $x->'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 29},
},
result={
"isIncomplete": False,
"items": [
{
"label": ":width",
"kind": 10,
"detail": "?int",
"inlineDetail": "?int",
"sortText": ":width",
"insertText": ":width",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":width",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 27,
"base_class": "\\:ab:cd:text",
},
},
{
"label": ":color",
"kind": 10,
"detail": "?string",
"inlineDetail": "?string",
"sortText": ":color",
"insertText": ":color",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":color",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 13,
"base_class": "\\:ab:cd:text",
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add '$x = <ab:cd:text/>; $y = $x->:'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 29},
},
"text": "$x = <ab:cd:text/>; $y = $x->:",
}
],
},
)
.request(
line=line(),
comment="autocomplete after '$x = <ab:cd:text/>; $y = $x->:'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 30},
},
result={
"isIncomplete": False,
"items": [
{
"label": ":width",
"kind": 10,
"detail": "?int",
"inlineDetail": "?int",
"sortText": ":width",
"insertText": ":width",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":width",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 27,
"base_class": "\\:ab:cd:text",
},
},
{
"label": ":color",
"kind": 10,
"detail": "?string",
"inlineDetail": "?string",
"sortText": ":color",
"insertText": ":color",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": ":color",
"filename": "${root_path}/completion_extras.php",
"line": 5,
"char": 13,
"base_class": "\\:ab:cd:text",
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add 'test_fun'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 30},
},
"text": "test_fun",
}
],
},
)
.request(
line=line(),
comment="autocomplete after 'test_fun'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 8},
},
result={
"isIncomplete": False,
"items": [
{
"label": "test_function",
"kind": 3,
"detail": "function",
"inlineDetail": "function",
"sortText": "test_function",
"insertText": "test_function",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {"fullname": "test_function"},
}
],
},
powered_by="serverless_ide",
)
.request(
line=line(),
comment="autocomplete resolving after 'test_fun'",
method="completionItem/resolve",
params={
"label": "test_function",
"kind": 3,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"insertText": "test_function",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"filename": "${root_path}/completion.php",
"line": 8,
"char": 10,
},
},
result={
"label": "test_function",
"kind": 3,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"documentation": {
"kind": "markdown",
"value": "test_function docblock.",
},
"insertText": "test_function",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"filename": "${root_path}/completion.php",
"line": 8,
"char": 10,
},
},
powered_by="serverless_ide",
)
.notification(
comment="Add 'switch (Elsa::Alonso) { case Elsa:'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 8},
},
"text": "switch (Elsa::Alonso) { case Elsa:",
}
],
},
)
.request(
line=line(),
comment="autocomplete after 'switch (Elsa::Alonso) { case Elsa:'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 34},
},
result={"isIncomplete": False, "items": []},
powered_by="serverless_ide",
)
.notification(
comment="Add 'switch (Elsa::Alonso) { case Elsa::'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 34},
},
"text": "switch (Elsa::Alonso) { case Elsa::",
}
],
},
)
.request(
line=line(),
comment="autocomplete after 'switch (Elsa::Alonso) { case Elsa::'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 35},
},
result={
"isIncomplete": False,
"items": [
{
"label": "class",
"kind": 21,
"detail": "classname<this>",
"inlineDetail": "classname<this>",
"sortText": "class",
"insertText": "class",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "class",
"filename": "${root_path}/completion_extras.php",
"line": 13,
"char": 6,
"base_class": "\\Elsa",
},
},
{
"label": "Bard",
"kind": 21,
"detail": "Elsa",
"inlineDetail": "Elsa",
"sortText": "Bard",
"insertText": "Bard",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "Bard",
"filename": "${root_path}/completion_extras.php",
"line": 13,
"char": 12,
"base_class": "\\Elsa",
},
},
{
"label": "Alonso",
"kind": 21,
"detail": "Elsa",
"inlineDetail": "Elsa",
"sortText": "Alonso",
"insertText": "Alonso",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "Alonso",
"filename": "${root_path}/completion_extras.php",
"line": 13,
"char": 12,
"base_class": "\\Elsa",
},
},
{
"label": "isValid",
"kind": 2,
"detail": "function(mixed $value): bool",
"inlineDetail": "(mixed $value)",
"itemType": "bool",
"sortText": "isValid",
"insertText": "isValid(${1:\\$value})",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "isValid",
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 49,
"char": 32,
"base_class": "\\Elsa",
},
},
{
"label": "getValues",
"kind": 2,
"detail": "function(): darray<string, Elsa>",
"inlineDetail": "()",
"itemType": "darray<string, Elsa>",
"sortText": "getValues",
"insertText": "getValues()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "getValues",
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 34,
"char": 32,
"base_class": "\\Elsa",
},
},
{
"label": "getNames",
"kind": 2,
"detail": "function(): darray<Elsa, string>",
"inlineDetail": "()",
"itemType": "darray<Elsa, string>",
"sortText": "getNames",
"insertText": "getNames()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "getNames",
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 43,
"char": 32,
"base_class": "\\Elsa",
},
},
{
"label": "coerce",
"kind": 2,
"detail": "function(mixed $value): ?Elsa",
"inlineDetail": "(mixed $value)",
"itemType": "?Elsa",
"sortText": "coerce",
"insertText": "coerce(${1:\\$value})",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "coerce",
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 56,
"char": 32,
"base_class": "\\Elsa",
},
},
{
"label": "assertAll",
"kind": 2,
"detail": "function(Traversable<mixed> $values): Container<Elsa>",
"inlineDetail": "(Traversable<mixed> $values)",
"itemType": "Container<Elsa>",
"sortText": "assertAll",
"insertText": "assertAll(${1:\\$values})",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "assertAll",
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 70,
"char": 32,
"base_class": "\\Elsa",
},
},
{
"label": "assert",
"kind": 2,
"detail": "function(mixed $value): Elsa",
"inlineDetail": "(mixed $value)",
"itemType": "Elsa",
"sortText": "assert",
"insertText": "assert(${1:\\$value})",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "assert",
"filename": "${hhi_path}/BuiltinEnum.hhi",
"line": 63,
"char": 32,
"base_class": "\\Elsa",
},
},
],
},
powered_by="serverless_ide",
)
.notification(
comment="Add 'switch (Elsa::Alonso) { case Elsa::Alonso:'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 35},
},
"text": "switch (Elsa::Alonso) { case Elsa::Alonso:",
}
],
},
)
.request(
line=line(),
comment="autocomplete after 'switch (Elsa::Alonso) { case Elsa::Alonso:'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 42},
},
result={"isIncomplete": False, "items": []},
powered_by="serverless_ide",
)
.notification(
comment="Add 'DeprecatedClass::'",
method="textDocument/didChange",
params={
"textDocument": {"uri": "${php_file_uri}"},
"contentChanges": [
{
"range": {
"start": {"line": 3, "character": 0},
"end": {"line": 3, "character": 41},
},
"text": "DeprecatedClass::",
}
],
},
)
.request(
line=line(),
comment="autocomplete after 'DeprecatedClass::'",
method="textDocument/completion",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 3, "character": 17},
},
result={
"isIncomplete": False,
"items": [
{
"label": "class",
"kind": 21,
"detail": "classname<this>",
"inlineDetail": "classname<this>",
"sortText": "class",
"insertText": "class",
"insertTextFormat": InsertTextFormat.PlainText.value,
"data": {
"fullname": "class",
"filename": "${root_path}/completion_extras.php",
"line": 18,
"char": 13,
"base_class": "\\DeprecatedClass",
},
},
{
"label": "test_do_not_use",
"kind": 2,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"sortText": "~test_do_not_use",
"insertText": "test_do_not_use()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "test_do_not_use",
"filename": "${root_path}/completion_extras.php",
"line": 22,
"char": 26,
"base_class": "\\DeprecatedClass",
},
},
{
"label": "getName",
"kind": 2,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"sortText": "getName",
"insertText": "getName()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "getName",
"filename": "${root_path}/completion_extras.php",
"line": 19,
"char": 26,
"base_class": "\\DeprecatedClass",
},
},
{
"label": "getAttributes_DO_NOT_USE",
"kind": 2,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"sortText": "~getAttributes_DO_NOT_USE",
"insertText": "getAttributes_DO_NOT_USE()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "getAttributes_DO_NOT_USE",
"filename": "${root_path}/completion_extras.php",
"line": 21,
"char": 26,
"base_class": "\\DeprecatedClass",
},
},
{
"label": "__getLoader",
"kind": 2,
"detail": "function(): void",
"inlineDetail": "()",
"itemType": "void",
"sortText": "~__getLoader",
"insertText": "__getLoader()",
"insertTextFormat": InsertTextFormat.Snippet.value,
"data": {
"fullname": "__getLoader",
"filename": "${root_path}/completion_extras.php",
"line": 20,
"char": 26,
"base_class": "\\DeprecatedClass",
},
},
],
},
powered_by="serverless_ide",
)
.request(line=line(), method="shutdown", params={}, result=None)
.notification(method="exit", params={})
)
self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_serverless_ide_definition(self) -> None:
        """Exercise textDocument/definition against serverless IDE (hh_server stopped).

        Opens definition.php and checks go-to-definition for: a plain function
        call, `new` expressions (constructor vs. class-name results, including
        classes that inherit their constructor), and a class-constant argument.
        Finally makes an unsaved edit and confirms definition results reflect
        the in-memory buffer rather than the file on disk.
        """
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("definition.php"))
        # Definitions must be served by the IDE process alone.
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("serverless_ide_definition"), use_serverless_ide=True
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                comment="call to `b_definition`",
                method="textDocument/definition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 3, "character": 10},
                },
                result=[
                    {
                        "uri": "file://${root_path}/definition.php",
                        "range": {
                            "start": {"line": 6, "character": 9},
                            "end": {"line": 6, "character": 21},
                        },
                        "title": "b_definition",
                    }
                ],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="call to `new BB(1)`",
                method="textDocument/definition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 29, "character": 13},
                },
                result=[
                    {
                        "uri": "file://${root_path}/definition.php",
                        "range": {
                            "start": {"line": 11, "character": 18},
                            "end": {"line": 11, "character": 29},
                        },
                        "title": "BB::__construct",
                    }
                ],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="call to `new CC(1)`",
                method="textDocument/definition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 30, "character": 13},
                },
                # CC has no constructor of its own, so both the class and the
                # inherited BB::__construct are reported.
                result=[
                    {
                        "uri": "file://${root_path}/definition.php",
                        "range": {
                            "start": {"line": 14, "character": 6},
                            "end": {"line": 14, "character": 8},
                        },
                        "title": "CC",
                    },
                    {
                        "uri": "file://${root_path}/definition.php",
                        "range": {
                            "start": {"line": 11, "character": 18},
                            "end": {"line": 11, "character": 29},
                        },
                        "title": "BB::__construct",
                    },
                ],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="call to `new DD(1)`",
                method="textDocument/definition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 31, "character": 13},
                },
                result=[
                    {
                        "uri": "file://${root_path}/definition.php",
                        "range": {
                            "start": {"line": 17, "character": 6},
                            "end": {"line": 17, "character": 8},
                        },
                        "title": "DD",
                    },
                    {
                        "uri": "file://${root_path}/definition.php",
                        "range": {
                            "start": {"line": 11, "character": 18},
                            "end": {"line": 11, "character": 29},
                        },
                        "title": "BB::__construct",
                    },
                ],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="call to `new EE(1)`",
                method="textDocument/definition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 32, "character": 13},
                },
                result=[
                    {
                        "uri": "file://${root_path}/definition.php",
                        "range": {
                            "start": {"line": 21, "character": 18},
                            "end": {"line": 21, "character": 29},
                        },
                        "title": "EE::__construct",
                    }
                ],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="call to `new FF(1)`",
                method="textDocument/definition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 33, "character": 13},
                },
                result=[
                    {
                        "uri": "file://${root_path}/definition.php",
                        "range": {
                            "start": {"line": 26, "character": 6},
                            "end": {"line": 26, "character": 8},
                        },
                        "title": "FF",
                    }
                ],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="call to `new TakesString(HasString::MyString)`",
                method="textDocument/definition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 45, "character": 23},
                },
                result=[
                    {
                        "uri": "file://${root_path}/definition.php",
                        "range": {
                            "start": {"line": 40, "character": 6},
                            "end": {"line": 40, "character": 15},
                        },
                        "title": "HasString",
                    }
                ],
                powered_by="serverless_ide",
            )
            .notification(
                comment="make local, unsaved change to the file",
                method="textDocument/didChange",
                params={
                    "textDocument": {"uri": "${php_file_uri}", "version": 2},
                    "contentChanges": [
                        {
                            "text": "test",
                            "range": {
                                "start": {"line": 3, "character": 9},
                                "end": {"line": 3, "character": 21},
                            },
                        }
                    ],
                },
            )
            .request(
                line=line(),
                comment="call to `test` instead of `b_definition`",
                method="textDocument/definition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 3, "character": 10},
                },
                result=[
                    {
                        "uri": "file://${root_path}/definition.php",
                        "range": {
                            "start": {"line": 28, "character": 9},
                            "end": {"line": 28, "character": 13},
                        },
                        "title": "test",
                    }
                ],
                powered_by="serverless_ide",
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_serverless_ide_overridden_definition(self) -> None:
        """Check textDocument/definition on overridden members (serverless IDE).

        Uses override.php to verify which definition is reported for a method
        inherited from a trait, an overridden static method, and an interface
        method implementation. Per the inline comments, when several overrides
        exist the pick is arbitrary and this test pins the current
        (alphabetical) implementation choice.
        """
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("override.php"))
        # Results must come from the IDE process, not hh_server.
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("serverless_ide_overridden_definition"),
                use_serverless_ide=True,
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                comment="find overridden method from trait. It's arbitrary which one we pick. This test embodies current (alphabetical) implementation.",
                method="textDocument/definition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 13, "character": 5},
                },
                result=[
                    {
                        "uri": "file://${root_path}/override.php",
                        "range": {
                            "start": {"line": 7, "character": 18},
                            "end": {"line": 7, "character": 21},
                        },
                        "title": "MyTrait::foo",
                    }
                ],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="find overridden static method. It's arbitrary which one we pick. This test embodies current (alphabetical) implementation.",
                method="textDocument/definition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 26, "character": 5},
                },
                result=[
                    {
                        "uri": "file://${root_path}/override.php",
                        "range": {
                            "start": {"line": 23, "character": 25},
                            "end": {"line": 23, "character": 28},
                        },
                        "title": "C2::bar",
                    }
                ],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="find overridden interface method",
                method="textDocument/definition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 35, "character": 5},
                },
                result=[
                    {
                        "uri": "file://${root_path}/override.php",
                        "range": {
                            "start": {"line": 32, "character": 18},
                            "end": {"line": 32, "character": 22},
                        },
                        "title": "I1::quux",
                    }
                ],
                powered_by="serverless_ide",
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_serverless_ide_document_symbol(self) -> None:
        """Check textDocument/documentSymbol against serverless IDE.

        Opens definition.php and asserts the full, ordered symbol list —
        functions (kind 12), classes (kind 5), methods (kind 6), and a class
        constant (kind 14) — including each symbol's source range and, for
        members, its containerName.
        """
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("definition.php"))
        # Symbols must be computed without a running hh_server.
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("serverless_ide_document_symbol"), use_serverless_ide=True
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                comment="documentSymbol call",
                method="textDocument/documentSymbol",
                params={"textDocument": {"uri": "${php_file_uri}"}},
                result=[
                    {
                        "name": "testClassMemberInsideConstructorInvocation",
                        "kind": 12,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 44, "character": 0},
                                "end": {"line": 46, "character": 1},
                            },
                        },
                    },
                    {
                        "name": "MyString",
                        "kind": 14,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 41, "character": 8},
                                "end": {"line": 41, "character": 29},
                            },
                        },
                        "containerName": "HasString",
                    },
                    {
                        "name": "HasString",
                        "kind": 5,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 40, "character": 0},
                                "end": {"line": 42, "character": 1},
                            },
                        },
                    },
                    {
                        "name": "__construct",
                        "kind": 6,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 37, "character": 2},
                                "end": {"line": 37, "character": 43},
                            },
                        },
                        "containerName": "TakesString",
                    },
                    {
                        "name": "TakesString",
                        "kind": 5,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 36, "character": 0},
                                "end": {"line": 38, "character": 1},
                            },
                        },
                    },
                    {
                        "name": "FF",
                        "kind": 5,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 26, "character": 0},
                                "end": {"line": 26, "character": 11},
                            },
                        },
                    },
                    {
                        "name": "__construct",
                        "kind": 6,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 21, "character": 2},
                                "end": {"line": 23, "character": 3},
                            },
                        },
                        "containerName": "EE",
                    },
                    {
                        "name": "EE",
                        "kind": 5,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 20, "character": 0},
                                "end": {"line": 24, "character": 1},
                            },
                        },
                    },
                    {
                        "name": "CC",
                        "kind": 5,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 14, "character": 0},
                                "end": {"line": 15, "character": 1},
                            },
                        },
                    },
                    {
                        "name": "__construct",
                        "kind": 6,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 11, "character": 2},
                                "end": {"line": 11, "character": 40},
                            },
                        },
                        "containerName": "BB",
                    },
                    {
                        "name": "BB",
                        "kind": 5,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 10, "character": 0},
                                "end": {"line": 12, "character": 1},
                            },
                        },
                    },
                    {
                        "name": "a_definition",
                        "kind": 12,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 2, "character": 0},
                                "end": {"line": 4, "character": 1},
                            },
                        },
                    },
                    {
                        "name": "b_definition",
                        "kind": 12,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 6, "character": 0},
                                "end": {"line": 8, "character": 1},
                            },
                        },
                    },
                    {
                        "name": "DD",
                        "kind": 5,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 17, "character": 0},
                                "end": {"line": 18, "character": 1},
                            },
                        },
                    },
                    {
                        "name": "test",
                        "kind": 12,
                        "location": {
                            "uri": "file://${root_path}/definition.php",
                            "range": {
                                "start": {"line": 28, "character": 0},
                                "end": {"line": 34, "character": 1},
                            },
                        },
                    },
                ],
                powered_by="serverless_ide",
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def initialize_spec(
        self,
        spec: LspTestSpec,
        use_serverless_ide: bool,
        supports_status: bool = False,  # does the caller wish to see all status messages?
        supports_init: bool = False,  # do we wish to interact with init, rather than waiting for init ok?
    ) -> LspTestSpec:
        """Prepend the standard LSP handshake to `spec` and return the new spec.

        Adds the `initialize` request (with the full expected server
        capabilities result), and — when `use_serverless_ide` is set — the
        expected `client/registerCapability` file-watcher registration plus,
        unless `supports_init`, a wait for the serverless IDE's
        "Finished init: ok" telemetry notification. Status diagnostics are
        ignored unless the caller opts in via `supports_status`.
        """
        if use_serverless_ide:
            initialization_options = {
                "namingTableSavedStatePath": "${naming_table_saved_state_path}",
                "namingTableSavedStateTestDelay": 0.0,
            }
            if supports_init:
                # A small delay, since otherwise init completes immediately
                # This isn't very racy. All we need is a tiny delay so that
                # other things which are in the queue get processed, rather
                # than continuing synchronously
                initialization_options["namingTableSavedStateTestDelay"] = 0.5
        else:
            initialization_options = {}
        window_capabilities = {}
        if supports_status:
            window_capabilities["status"] = {"dynamicRegistration": False}
        spec = spec.ignore_notifications(method="telemetry/event").request(
            line=line(),
            method="initialize",
            params={
                "initializationOptions": initialization_options,
                "processId": None,
                "rootPath": "${root_path}",
                "capabilities": {
                    "window": window_capabilities,
                    "textDocument": {
                        "completion": {"completionItem": {"snippetSupport": True}}
                    },
                },
            },
            # Expected server capabilities, matched exactly.
            result={
                "capabilities": {
                    "textDocumentSync": {
                        "openClose": True,
                        "change": 2,
                        "willSave": False,
                        "willSaveWaitUntil": False,
                        "save": {"includeText": False},
                    },
                    "hoverProvider": True,
                    "completionProvider": {
                        "resolveProvider": True,
                        "triggerCharacters": ["$", ">", "\\", ":", "<", "[", "'", '"'],
                    },
                    "signatureHelpProvider": {"triggerCharacters": ["(", ","]},
                    "definitionProvider": True,
                    "typeDefinitionProvider": True,
                    "referencesProvider": True,
                    "documentHighlightProvider": True,
                    "documentSymbolProvider": True,
                    "workspaceSymbolProvider": True,
                    "codeActionProvider": False,
                    "documentFormattingProvider": True,
                    "documentRangeFormattingProvider": True,
                    "documentOnTypeFormattingProvider": {
                        "firstTriggerCharacter": ";",
                        "moreTriggerCharacter": ["}"],
                    },
                    "renameProvider": True,
                    "implementationProvider": True,
                    "typeCoverageProvider": True,
                    "rageProvider": True,
                }
            },
        )
        if use_serverless_ide:
            # The serverless IDE registers a watcher over the whole workspace.
            spec = spec.wait_for_server_request(
                method="client/registerCapability",
                params={
                    "registrations": [
                        {
                            "id": "did-change-watched-files",
                            "method": "workspace/didChangeWatchedFiles",
                            "registerOptions": {
                                "watchers": [{"globPattern": "**", "kind": 7}]
                            },
                        }
                    ]
                },
                result=None,
            )
        if not supports_status:
            spec = spec.ignore_status_diagnostics(True)
        if use_serverless_ide and not supports_init:
            spec = spec.wait_for_notification(
                comment="wait for sIDE to finish init",
                method="telemetry/event",
                params={"type": 4, "message": "[client-ide] Finished init: ok"},
            )
        return spec
    def test_serverless_ide_type_definition(self) -> None:
        """Check textDocument/typeDefinition against serverless IDE.

        Uses type_definition.php to cover: a conditionally-typed value (two
        candidate class results), plain class instances (with and without a
        cast), a primitive (empty result), a function's class return type, and
        a function-typed value whose "definition" is its function type.
        """
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("type_definition.php"))
        # Type definitions must come from the IDE process alone.
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("serverless_ide_type_definition"), use_serverless_ide=True
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                comment="Conditional Type Definition of HH or II",
                method="textDocument/typeDefinition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 32, "character": 2},
                },
                result=[
                    {
                        "uri": "${php_file_uri}",
                        "range": {
                            "start": {"line": 2, "character": 6},
                            "end": {"line": 2, "character": 8},
                        },
                        "title": "\\HH",
                    },
                    {
                        "uri": "${php_file_uri}",
                        "range": {
                            "start": {"line": 12, "character": 6},
                            "end": {"line": 12, "character": 8},
                        },
                        "title": "\\LL",
                    },
                ],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="Standard Class Definition",
                method="textDocument/typeDefinition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 40, "character": 2},
                },
                result=[
                    {
                        "uri": "${php_file_uri}",
                        "range": {
                            "start": {"line": 2, "character": 6},
                            "end": {"line": 2, "character": 8},
                        },
                        "title": "\\HH",
                    }
                ],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="Class Type Definition with Casting",
                method="textDocument/typeDefinition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 41, "character": 2},
                },
                result=[
                    {
                        "uri": "${php_file_uri}",
                        "range": {
                            "start": {"line": 2, "character": 6},
                            "end": {"line": 2, "character": 8},
                        },
                        "title": "\\HH",
                    }
                ],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="Primitive Type Definition",
                method="textDocument/typeDefinition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 42, "character": 2},
                },
                # Primitives have no type definition location.
                result=[],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="Function Return Type Definition",
                method="textDocument/typeDefinition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 43, "character": 2},
                },
                result=[
                    {
                        "uri": "${php_file_uri}",
                        "range": {
                            "start": {"line": 12, "character": 6},
                            "end": {"line": 12, "character": 8},
                        },
                        "title": "\\LL",
                    }
                ],
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="Function definition with primitive return type",
                method="textDocument/typeDefinition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 44, "character": 2},
                },
                result=[
                    {
                        "uri": "${php_file_uri}",
                        "range": {
                            "start": {"line": 22, "character": 9},
                            "end": {"line": 22, "character": 29},
                        },
                        "title": "(function(): int)",
                    }
                ],
                powered_by="serverless_ide",
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_serverless_ide_hover(self) -> None:
        """Check textDocument/hover against serverless IDE.

        Uses hover.php to cover: a function invocation (type + docblock),
        string/int literals inside and outside calls, a constant reference,
        and positions that must yield no hover (whitespace, a keyword, a
        comment, past end-of-line, past end-of-file). Also verifies that
        copyright and generated-code boilerplate are stripped from docblocks.
        """
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("hover.php"))
        # Hover must be answered by the IDE process, not hh_server.
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("serverless_ide_hover"), use_serverless_ide=True
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                comment="hover over function invocation",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 3, "character": 16},
                },
                result={
                    "contents": [
                        {"language": "hack", "value": "int"},
                        "A comment describing b_hover.",
                    ],
                    "range": {
                        "start": {"line": 3, "character": 9},
                        "end": {"line": 3, "character": 16},
                    },
                },
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="hover over string literal outside call",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 25, "character": 12},  # 9 - 16
                },
                result={"contents": [{"language": "hack", "value": "string"}]},
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="hover over string literal inside call",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 26, "character": 20},  # 16 - 29
                },
                result={"contents": [{"language": "hack", "value": "string"}]},
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="hover over int literal inside call",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 26, "character": 32},  # 31 - 33
                },
                result={"contents": [{"language": "hack", "value": "int"}]},
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="hover over constant reference",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 15, "character": 19},
                },
                result={
                    "contents": [
                        {"language": "hack", "value": "THE_ANSWER"},
                        "A comment describing THE_ANSWER",
                        "int THE_ANSWER = 42",
                    ],
                    "range": {
                        "start": {"line": 15, "character": 9},
                        "end": {"line": 15, "character": 19},
                    },
                },
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="hover over whitespace",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 3, "character": 1},
                },
                result=None,
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="hover over a keyword",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 2, "character": 1},
                },
                result=None,
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="hover over a comment",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 1, "character": 4},
                },
                result=None,
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="hover past the end of a line",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 3, "character": 100},
                },
                result=None,
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="hover past the end of a file",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 300, "character": 0},
                },
                result=None,
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="hover over class with copyright docblock",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 37, "character": 15},
                },
                result={
                    "contents": [
                        {"language": "hack", "value": "final class CopyrightClass"},
                        "Testing copyright removal",
                    ],
                    "range": {
                        "start": {"line": 37, "character": 2},
                        "end": {"line": 37, "character": 16},
                    },
                },
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="hover over class with generated docblock",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 58, "character": 15},
                },
                result={
                    "contents": [
                        {"language": "hack", "value": "final class GeneratedClass"},
                        "Testing generated text removal",
                    ],
                    "range": {
                        "start": {"line": 58, "character": 2},
                        "end": {"line": 58, "character": 16},
                    },
                },
                powered_by="serverless_ide",
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_serverless_ide_file_touched_on_disk(self) -> None:
        """Verify hover still works after the open file is touched on disk.

        Sends a workspace/didChangeWatchedFiles change (type 2 = Changed) for
        the already-open file, waits for the serverless IDE's "Done processing
        file changes" telemetry, then confirms hover results are unchanged.
        """
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("hover.php"))
        # All responses must come from the IDE process.
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("serverless_ide_file_on_disk_change"),
                use_serverless_ide=True,
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .notification(
                method="workspace/didChangeWatchedFiles",
                params={"changes": [{"uri": "${php_file_uri}", "type": 2}]},
            )
            .wait_for_notification(
                comment="wait for sIDE to process file change",
                method="telemetry/event",
                params={
                    "type": 4,
                    "message": "[client-ide] Done processing file changes",
                },
            )
            .request(
                line=line(),
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 3, "character": 16},
                },
                result={
                    "contents": [
                        {"language": "hack", "value": "int"},
                        "A comment describing b_hover.",
                    ],
                    "range": {
                        "start": {"line": 3, "character": 9},
                        "end": {"line": 3, "character": 16},
                    },
                },
                powered_by="serverless_ide",
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_serverless_ide_file_hover_with_errors(self) -> None:
        """Verify hover keeps working on a file with parse/lowering errors.

        First checks a normal hover on hover_with_errors.php, then removes the
        'public' visibility modifier (which triggers AST->AAST errors) via
        didChange and asserts the identical hover result is still produced.
        """
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("hover_with_errors.php"))
        # IDE services only; no hh_server.
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("serverless_ide_hover_with_errors"), use_serverless_ide=True
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .notification(
                method="workspace/didChangeWatchedFiles",
                params={"changes": [{"uri": "${php_file_uri}", "type": 2}]},
            )
            .wait_for_notification(
                comment="wait for sIDE to process file change",
                method="telemetry/event",
                params={
                    "type": 4,
                    "message": "[client-ide] Done processing file changes",
                },
            )
            .request(
                line=line(),
                comment="Totally normal hover",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 14, "character": 37},
                },
                result={
                    "contents": [
                        {
                            "language": "hack",
                            "value": "public static function staticMethod(string $z): void",
                        },
                        'During testing, we\'ll remove the "public" tag from this '
                        "method\n"
                        "to ensure that we can still get IDE services",
                        "Return type: `void`",
                        "Full name: `HoverWithErrorsClass::staticMethod`",
                    ],
                    "range": {
                        "end": {"character": 39, "line": 14},
                        "start": {"character": 27, "line": 14},
                    },
                },
                powered_by="serverless_ide",
            )
            .notification(
                comment="Remove the 'public' visibility modifier which triggers AST->AAST errors",
                method="textDocument/didChange",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "contentChanges": [
                        {
                            "range": {
                                "start": {"line": 10, "character": 2},
                                "end": {"line": 10, "character": 8},
                            },
                            "text": "",
                        }
                    ],
                },
            )
            .request(
                line=line(),
                comment="Hover should still work even if visibility modifier has been removed",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 14, "character": 37},
                },
                result={
                    "contents": [
                        {
                            "language": "hack",
                            "value": "public static function staticMethod(string $z): void",
                        },
                        'During testing, we\'ll remove the "public" tag from this '
                        "method\n"
                        "to ensure that we can still get IDE services",
                        "Return type: `void`",
                        "Full name: `HoverWithErrorsClass::staticMethod`",
                    ],
                    "range": {
                        "end": {"character": 39, "line": 14},
                        "start": {"character": 27, "line": 14},
                    },
                },
                powered_by="serverless_ide",
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_serverless_ide_formatting(self) -> None:
        """Check textDocument/formatting (whole-file) via hackfmt.

        Opens messy.php and asserts the full-document edit that hackfmt
        produces. Skipped when the hackfmt binary is unavailable.
        """
        # This test will fail if hackfmt can't be found
        if not self.test_driver.run_hackfmt_check():
            raise unittest.SkipTest("Hackfmt can't be found. Skipping.")
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("messy.php"))
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(LspTestSpec("formatting"), use_serverless_ide=True)
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                method="textDocument/formatting",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "options": {"tabSize": 5, "insertSpaces": True},
                },
                result=[
                    {
                        "range": {
                            "start": {"line": 0, "character": 0},
                            "end": {"line": 15, "character": 0},
                        },
                        "newText": "<?hh //strict\n\nfunction x(): string {\n"
                        + "  /* @lint-ignore TXT2 3 tabs on purpose */\n"
                        + '  $a = "this";\n\n'
                        + "  /* @lint-ignore TXT2 2 tabs on purpose */\n"
                        + '  $b = "is";\n\n'
                        + "  /* lint-ignore TXT2 1 tab on purpose */\n"
                        + '  $c = "messy"; // 1 tab\n\n'
                        + '  $d = "."; // 4 spaces\n'
                        + '  return "$a"."$b"."$c"."d";\n}\n',
                    }
                ],
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_serverless_ide_rangeformatting(self) -> None:
        """textDocument/rangeFormatting of a single line via serverless IDE.

        Formats only lines 4-5 of messy.php and expects a single edit that
        re-indents the `$a = "this";` statement, leaving the rest untouched.
        Skipped when the hackfmt binary cannot be found.
        """
        # This test will fail if hackfmt can't be found
        if not self.test_driver.run_hackfmt_check():
            raise unittest.SkipTest("Hackfmt can't be found. Skipping.")
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("messy.php"))
        # hh_server is stopped so the request is answered serverlessly.
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("range_formatting"), use_serverless_ide=True
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                method="textDocument/rangeFormatting",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "range": {
                        "start": {"line": 4, "character": 0},
                        "end": {"line": 5, "character": 0},
                    },
                    "options": {"tabSize": 5, "insertSpaces": True},
                },
                result=[
                    {
                        "range": {
                            "start": {"line": 4, "character": 0},
                            "end": {"line": 5, "character": 0},
                        },
                        "newText": ' $a = "this";\n',
                    }
                ],
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_serverless_ide_ontypeformatting(self) -> None:
        """textDocument/onTypeFormatting triggered by ';' and '}' characters.

        Three on-type requests: typing ';' reflows a long call into one
        argument per line; typing '}' twice produces smaller corrective
        edits. Skipped when the hackfmt binary cannot be found.
        NOTE(review): unlike the sibling formatting tests this one does not
        call stop_hh_server(), though run_spec still uses
        wait_for_server=False — confirm whether that is intentional.
        """
        # This test will fail if hackfmt can't be found
        if not self.test_driver.run_hackfmt_check():
            raise unittest.SkipTest("Hackfmt can't be found. Skipping.")
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("ontypeformatting.php"))
        spec = (
            self.initialize_spec(
                LspTestSpec("ontypeformatting"), use_serverless_ide=True
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                method="textDocument/onTypeFormatting",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 9, "character": 58},
                    "ch": ";",
                    "options": {"tabSize": 2, "insertSpaces": True},
                },
                result=[
                    {
                        "range": {
                            "start": {"line": 5, "character": 17},
                            "end": {"line": 9, "character": 58},
                        },
                        "newText": "{\n test_otf(\n"
                        + " '1234567890',\n"
                        + " '1234567890',\n"
                        + " '1234567890',\n"
                        + " '1234567890',\n"
                        + " '1234567890',\n"
                        + " '1234567890',\n );",
                    }
                ],
            )
            .request(
                line=line(),
                method="textDocument/onTypeFormatting",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 13, "character": 1},
                    "ch": "}",
                    "options": {"tabSize": 2, "insertSpaces": True},
                },
                result=[
                    {
                        "range": {
                            "start": {"line": 13, "character": 0},
                            "end": {"line": 13, "character": 1},
                        },
                        "newText": "{",
                    }
                ],
            )
            .request(
                line=line(),
                method="textDocument/onTypeFormatting",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 15, "character": 16},
                    "ch": "}",
                    "options": {"tabSize": 2, "insertSpaces": True},
                },
                result=[
                    {
                        "range": {
                            "start": {"line": 15, "character": 0},
                            "end": {"line": 15, "character": 16},
                        },
                        "newText": "function otf() {}",
                    }
                ],
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_did_change(self) -> None:
        """textDocument/didChange should produce fresh hh_server diagnostics.

        Replaces one character of didchange.php so it no longer parses,
        waits for the resulting publishDiagnostics (error code 1002,
        "A semicolon ; is expected here."), then expects diagnostics to be
        cleared again as part of shutdown.
        """
        self.prepare_server_environment()
        variables = self.setup_php_file("didchange.php")
        spec = (
            self.initialize_spec(LspTestSpec("did_change"), use_serverless_ide=False)
            .wait_for_hh_server_ready()
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .notification(
                method="textDocument/didChange",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "contentChanges": [
                        {
                            "range": {
                                "start": {"line": 7, "character": 11},
                                "end": {"line": 7, "character": 12},
                            },
                            "text": "a",
                        }
                    ],
                },
            )
            .wait_for_notification(
                method="textDocument/publishDiagnostics",
                params={
                    "uri": "${php_file_uri}",
                    "diagnostics": [
                        {
                            "range": {
                                "start": {"line": 7, "character": 11},
                                "end": {"line": 7, "character": 11},
                            },
                            "severity": 1,
                            "code": 1002,
                            "source": "Hack",
                            "message": "A semicolon ; is expected here.",
                            "relatedLocations": [],
                            "relatedInformation": [],
                        }
                    ],
                },
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .wait_for_notification(
                comment="Hack appears to clear out diagnostics before shutting down",
                method="textDocument/publishDiagnostics",
                params={"uri": "${php_file_uri}", "diagnostics": []},
            )
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=True, use_serverless_ide=False)
def test_go_to_implementation(self) -> None:
self.prepare_server_environment()
variables = self.setup_php_file("go_to_implementation.php")
spec = (
self.initialize_spec(
LspTestSpec("test_go_to_implementation"), use_serverless_ide=False
)
.wait_for_hh_server_ready()
.notification(
method="textDocument/didOpen",
params={
"textDocument": {
"uri": "${php_file_uri}",
"languageId": "hack",
"version": 1,
"text": "${php_file}",
}
},
)
.request(
line=line(),
comment="go to implemenetation: abstract class",
method="textDocument/implementation",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 1, "character": 17},
},
result=[
{
"uri": "${php_file_uri}",
"range": {
"start": {"line": 7, "character": 6},
"end": {"line": 7, "character": 9},
},
}
],
)
.request(
line=line(),
comment="go to implemenetation: interface",
method="textDocument/implementation",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 13, "character": 13},
},
result=[
{
"uri": "${php_file_uri}",
"range": {
"start": {"line": 17, "character": 6},
"end": {"line": 17, "character": 9},
},
}
],
)
.request(
line=line(),
comment="go to implemenetation: trait",
method="textDocument/implementation",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 23, "character": 10},
},
result=[
{
"uri": "${php_file_uri}",
"range": {
"start": {"line": 30, "character": 6},
"end": {"line": 30, "character": 16},
},
}
],
)
.request(
line=line(),
comment="go to implemenetation: method",
method="textDocument/implementation",
params={
"textDocument": {"uri": "${php_file_uri}"},
"position": {"line": 19, "character": 18},
},
result=[
{
"uri": "${php_file_uri}",
"range": {
"start": {"line": 8, "character": 18},
"end": {"line": 8, "character": 22},
},
}
],
)
.request(line=line(), method="shutdown", params={}, result=None)
.notification(method="exit", params={})
)
self.run_spec(spec, variables, wait_for_server=True, use_serverless_ide=False)
    def test_signature_help(self) -> None:
        """textDocument/signatureHelp at many cursor positions.

        Walks the cursor around call sites in signaturehelp.php and checks:
        no help outside the parens; correct label/doc/parameters for a
        0-argument constructor, a 2-argument instance method (with
        activeParameter advancing past the comma), a static method, a global
        function, a namespace-aliased function, and three functions whose
        doc blocks carry @param/@return annotations.
        """
        self.prepare_server_environment()
        variables = self.setup_php_file("signaturehelp.php")
        spec = (
            self.initialize_spec(
                LspTestSpec("test_signature_help"), use_serverless_ide=False
            )
            .wait_for_hh_server_ready()
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                comment="signature help for 0-argument constructor"
                " (left of opening paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 16, "character": 18},
                },
                result=None,
            )
            .request(
                line=line(),
                comment="signature help for 0-argument constructor",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 16, "character": 19},
                },
                result={
                    "signatures": [
                        {
                            "label": "public function __construct(): void",
                            "documentation": "Constructor with doc block",
                            "parameters": [],
                        }
                    ],
                    "activeSignature": 0,
                    "activeParameter": 0,
                },
            )
            .request(
                line=line(),
                comment="signature help for 0-argument constructor"
                " (right of closing paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 16, "character": 20},
                },
                result=None,
            )
            .request(
                line=line(),
                comment="signature help for 2-argument instance method"
                " (left of opening paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 17, "character": 20},
                },
                result=None,
            )
            .request(
                line=line(),
                comment="signature help for 2-argument instance method"
                " (right of opening paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 17, "character": 21},
                },
                result={
                    "signatures": [
                        {
                            "label": "public function instanceMethod"
                            "(int $x1, int $x2): void",
                            "documentation": "Instance method with doc block",
                            "parameters": [{"label": "$x1"}, {"label": "$x2"}],
                        }
                    ],
                    "activeSignature": 0,
                    "activeParameter": 0,
                },
            )
            .request(
                line=line(),
                comment="signature help for 2-argument instance method"
                " (left of first comma)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 17, "character": 22},
                },
                result={
                    "signatures": [
                        {
                            "label": "public function instanceMethod"
                            "(int $x1, int $x2): void",
                            "documentation": "Instance method with doc block",
                            "parameters": [{"label": "$x1"}, {"label": "$x2"}],
                        }
                    ],
                    "activeSignature": 0,
                    "activeParameter": 1,
                },
            )
            .request(
                line=line(),
                comment="signature help for 2-argument instance method"
                " (right of first comma)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 17, "character": 23},
                },
                result={
                    "signatures": [
                        {
                            "label": "public function instanceMethod"
                            "(int $x1, int $x2): void",
                            "documentation": "Instance method with doc block",
                            "parameters": [{"label": "$x1"}, {"label": "$x2"}],
                        }
                    ],
                    "activeSignature": 0,
                    "activeParameter": 1,
                },
            )
            .request(
                line=line(),
                comment="signature help for 2-argument instance method"
                " (left of closing paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 17, "character": 24},
                },
                result={
                    "signatures": [
                        {
                            "label": "public function instanceMethod"
                            "(int $x1, int $x2): void",
                            "documentation": "Instance method with doc block",
                            "parameters": [{"label": "$x1"}, {"label": "$x2"}],
                        }
                    ],
                    "activeSignature": 0,
                    "activeParameter": 1,
                },
            )
            .request(
                line=line(),
                comment="signature help for 2-argument instance method"
                " (right of closing paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 17, "character": 25},
                },
                result=None,
            )
            .request(
                line=line(),
                comment="signature help for 1-argument static method"
                " (left of open paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 18, "character": 23},
                },
                result=None,
            )
            .request(
                line=line(),
                comment="signature help for 1-argument static method"
                " (right of open paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 18, "character": 24},
                },
                result={
                    "signatures": [
                        {
                            "label": "public static function staticMethod"
                            "(string $z): void",
                            "documentation": "Static method with doc block",
                            "parameters": [{"label": "$z"}],
                        }
                    ],
                    "activeSignature": 0,
                    "activeParameter": 0,
                },
            )
            .request(
                line=line(),
                comment="signature help for 2-argument global function"
                " (left of open paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 19, "character": 17},
                },
                result=None,
            )
            .request(
                line=line(),
                comment="signature help for 2-argument global function"
                " (right of open paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 19, "character": 18},
                },
                result={
                    "signatures": [
                        {
                            "label": "function global_function"
                            "(string $s, int $x): void",
                            "documentation": "Global function with doc block",
                            "parameters": [{"label": "$s"}, {"label": "$x"}],
                        }
                    ],
                    "activeSignature": 0,
                    "activeParameter": 0,
                },
            )
            .request(
                line=line(),
                comment="signature help for 1-argument namespace-aliased global"
                " function (right of open paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 20, "character": 26},
                },
                result=None,
            )
            # NOTE(review): this request duplicates the previous one verbatim
            # (same position 20:26, same comment, same None result) — possibly
            # a copy/paste intended to probe a different character; confirm.
            .request(
                line=line(),
                comment="signature help for 1-argument namespace-aliased global"
                " function (right of open paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 20, "character": 26},
                },
                result=None,
            )
            .request(
                line=line(),
                comment="signature help for 1-argument namespace-aliased global"
                " function (right of open paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 20, "character": 27},
                },
                result={
                    "signatures": [
                        {
                            "label": "function Derp\\Lib\\Herp\\aliased_global_func(string $s): void",
                            "documentation": "Namespace-aliased function with doc block",
                            "parameters": [{"label": "$s"}],
                        }
                    ],
                    "activeSignature": 0,
                    "activeParameter": 0,
                },
            )
            .request(
                line=line(),
                comment="signature help for 1-argument namespace-aliased global"
                " function (right of open paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 20, "character": 28},
                },
                result={
                    "signatures": [
                        {
                            "label": "function Derp\\Lib\\Herp\\aliased_global_func(string $s): void",
                            "documentation": "Namespace-aliased function with doc block",
                            "parameters": [{"label": "$s"}],
                        }
                    ],
                    "activeSignature": 0,
                    "activeParameter": 0,
                },
            )
            .request(
                line=line(),
                comment="signature help for 2-argument function with params"
                " (right of open paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 21, "character": 30},
                },
                result={
                    "signatures": [
                        {
                            "label": "function test_signature_help_params1("
                            "\n string $param1,\n string $param2\n): void",
                            "documentation": "comment describing the method"
                            "\n@param $param1 info1"
                            "\n@param param2 info2",
                            "parameters": [
                                {"label": "$param1", "documentation": "info1"},
                                {"label": "$param2", "documentation": "info2"},
                            ],
                        }
                    ],
                    "activeSignature": 0,
                    "activeParameter": 0,
                },
            )
            .request(
                line=line(),
                comment="signature help for 2-argument function with params"
                " (right of open paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 22, "character": 30},
                },
                result={
                    "signatures": [
                        {
                            "label": "function test_signature_help_params2("
                            "\n string $param1,\n string $param2\n): void",
                            "documentation": "comment describing the method"
                            "\n@param $param1 info1",
                            "parameters": [
                                {"label": "$param1", "documentation": "info1"},
                                {"label": "$param2"},
                            ],
                        }
                    ],
                    "activeSignature": 0,
                    "activeParameter": 0,
                },
            )
            .request(
                line=line(),
                comment="signature help for 2-argument function with params"
                " (right of open paren)",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 23, "character": 30},
                },
                result={
                    "signatures": [
                        {
                            "label": "function test_signature_help_params3("
                            "\n string $param1,\n string $param2\n): string",
                            "documentation": "@param $param1 info1"
                            "\n for param1"
                            "\n@param $param2 info2"
                            "\n@return the string"
                            "\n 'hack'",
                            "parameters": [
                                {
                                    "label": "$param1",
                                    "documentation": "info1 for param1",
                                },
                                {"label": "$param2", "documentation": "info2"},
                            ],
                        }
                    ],
                    "activeSignature": 0,
                    "activeParameter": 0,
                },
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=True, use_serverless_ide=False)
    def test_signature_help_lambda(self) -> None:
        """signatureHelp in and around lambdas (hh_server path).

        Checks: help for a call taking a lambda argument, help for a normal
        call made inside a lambda body, and no help for plain text inside a
        lambda on either side of an open paren.
        NOTE(review): the spec label says "serverless_ide" but this runs with
        use_serverless_ide=False — label looks stale; confirm.
        """
        self.prepare_server_environment()
        variables = self.setup_php_file("signaturehelp_lambda.php")
        spec = (
            self.initialize_spec(
                LspTestSpec("test_serverless_ide_signature_help_lambda"),
                use_serverless_ide=False,
            )
            .wait_for_hh_server_ready()
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                comment="signature help for a normal function call",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 8, "character": 29},
                },
                result={
                    "activeParameter": 0,
                    "activeSignature": 0,
                    "signatures": [
                        {
                            "label": "function test_lambda_sighelp(\n"
                            " string $str,\n"
                            " (function(string): int) $f\n"
                            "): int",
                            "parameters": [{"label": "$str"}, {"label": "$f"}],
                        }
                    ],
                },
            )
            .request(
                line=line(),
                comment="signature help for normal function call within a lambda",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 9, "character": 21},
                },
                result={
                    "activeParameter": 0,
                    "activeSignature": 0,
                    "signatures": [
                        {
                            "label": "function normal_test_func(string $str): void",
                            "parameters": [{"label": "$str"}],
                        }
                    ],
                },
            )
            .request(
                line=line(),
                comment="signature help for text within a lambda, left side of an open paren",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 10, "character": 15},
                },
                result=None,
            )
            .request(
                line=line(),
                comment="signature help for text within a lambda, right side of an open paren",
                method="textDocument/signatureHelp",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 10, "character": 16},
                },
                result=None,
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=True, use_serverless_ide=False)
def test_rename(self) -> None:
self.prepare_server_environment()
variables = self.setup_php_file("rename.php")
self.load_and_run("rename", variables)
def test_references(self) -> None:
self.prepare_server_environment()
variables = self.setup_php_file("references.php")
self.load_and_run("references", variables)
def test_non_existing_method(self) -> None:
self.prepare_server_environment()
variables = self.setup_php_file("nomethod.php")
self.load_and_run("nomethod", variables)
def test_bad_call(self) -> None:
self.prepare_server_environment()
variables = self.setup_php_file("bad_call.php")
self.load_and_run("bad_call", variables)
    def test_non_blocking(self) -> None:
        """hh_server should answer requests while busy with a long typecheck.

        start_hh_loop_forever_assert_timeout() keeps hh_server occupied; a
        definition request is issued (registered under wait_id, not awaited
        yet), then a didOpen rewrites __hh_loop_forever_foo to break the
        infinite loop, and only then do we wait for the definition response —
        proving the request wasn't blocked behind the busy loop.
        """
        self.prepare_server_environment()
        variables = self.setup_php_file("non_blocking.php")
        self.test_driver.start_hh_loop_forever_assert_timeout()
        spec = (
            self.initialize_spec(LspTestSpec("non_blocking"), use_serverless_ide=False)
            .wait_for_hh_server_ready()
            .request(
                line=line(),
                method="textDocument/definition",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 7, "character": 11},
                },
                result=[
                    {
                        "uri": "file://${root_path}/non_blocking.php",
                        "range": {
                            "start": {"line": 2, "character": 9},
                            "end": {"line": 2, "character": 32},
                        },
                        "title": "non_blocking_definition",
                    }
                ],
                wait_id="definition request",
            )
            .notification(
                comment="remove hh_loop_forever() invocation to break the infinite loop",
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${root_path}/__hh_loop_forever_foo.php",
                        "languageId": "hack",
                        "version": 1,
                        "text": """\
<?hh // strict
function __hh_loop_forever_foo(): int {
return 4;
}
""",
                    }
                },
            )
            .wait_for_response(wait_id="definition request")
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=True, use_serverless_ide=False)
    def test_serverless_ide_hierarchy_file_change_on_disk(self) -> None:
        """Serverless IDE picks up on-disk edits to a base class.

        Hovers an inherited method in incremental_derived.php (expects int),
        rewrites incremental_base.php on disk with a string return type
        (write_to_disk with notify=True), then hovers again and expects the
        new string signature — i.e. the decl cache was invalidated.
        """
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("incremental_derived.php"))
        changed_php_file_uri = self.repo_file("incremental_base.php")
        variables.update({"changed_php_file_uri": changed_php_file_uri})
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("serverless_ide_hierarchy_file_change_on_disk"),
                use_serverless_ide=True,
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                comment="hover before change to class hierarchy should be `int`",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 7, "character": 14},
                },
                result={
                    "contents": [
                        {"language": "hack", "value": "public function foo(): int"},
                        "Return type: `int`",
                        "Full name: `BaseClassIncremental::foo`",
                    ],
                    "range": {
                        "start": {"line": 7, "character": 12},
                        "end": {"line": 7, "character": 15},
                    },
                },
                powered_by="serverless_ide",
            )
            .write_to_disk(
                uri=changed_php_file_uri,
                contents="""\
<?hh // strict
class BaseClassIncremental {
public function foo(): string { return ''; }
}
""",
                notify=True,
            )
            .request(
                line=line(),
                comment="hover after change to class hierarchy should be `string`",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 7, "character": 14},
                },
                result={
                    "contents": [
                        {"language": "hack", "value": "public function foo(): string"},
                        "Return type: `string`",
                        "Full name: `BaseClassIncremental::foo`",
                    ],
                    "range": {
                        "start": {"line": 7, "character": 12},
                        "end": {"line": 7, "character": 15},
                    },
                },
                powered_by="serverless_ide",
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_serverless_ide_decl_in_unsaved_buffer_changed(self) -> None:
        """Hover reflects an unsaved edit to a decl in the same buffer.

        Hovers a call to b_hover (expects int + original doc comment), then
        sends a didChange that rewrites b_hover to return string with a new
        comment — without saving — and expects the next hover to show the
        updated signature and comment.
        """
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("hover.php"))
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("serverless_ide_decl_in_unsaved_buffer_changed"),
                use_serverless_ide=True,
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                comment="hover over function invocation",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 3, "character": 16},
                },
                result={
                    "contents": [
                        {"language": "hack", "value": "int"},
                        "A comment describing b_hover.",
                    ],
                    "range": {
                        "start": {"line": 3, "character": 9},
                        "end": {"line": 3, "character": 16},
                    },
                },
                powered_by="serverless_ide",
            )
            .notification(
                comment="make local, unsaved change to the file",
                method="textDocument/didChange",
                params={
                    "textDocument": {"uri": "${php_file_uri}", "version": 2},
                    "contentChanges": [
                        {
                            "text": """\
<?hh // strict
// comment
function a_hover(): int {
return b_hover();
}
# A comment describing b_hover differently.
function b_hover(): string {
return 42;
}
"""
                        }
                    ],
                },
            )
            .request(
                line=line(),
                comment="another hover over function invocation, should be string now",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 3, "character": 16},
                },
                result={
                    "contents": [
                        {"language": "hack", "value": "string"},
                        "A comment describing b_hover differently.",
                    ],
                    "range": {
                        "start": {"line": 3, "character": 9},
                        "end": {"line": 3, "character": 16},
                    },
                },
                powered_by="serverless_ide",
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_serverless_ide_decl_two_unsaved_buffers(self) -> None:
        """Cross-file hover uses DISK decls, not another buffer's edits.

        unsaved2.php is opened (and later didChange'd) with signatures that
        differ from its on-disk contents. Hovers in unsaved1.php keep
        reporting the on-disk `int` signature throughout; only after
        write_to_disk saves the string version does the hover change.
        """
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("unsaved1.php"))
        variables.update({"unsaved2_file_uri": self.repo_file_uri("unsaved2.php")})
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("test_serverless_ide_decl_two_unsaved_buffers"),
                use_serverless_ide=True,
            )
            .notification(
                comment="open 'unsaved1.php', since we'll be hovering in it",
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .notification(
                comment="open 'unsaved2.php' with a bool-returning signature, different from disk",
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${unsaved2_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": """\
<?hh //strict
function unsaved_bar(): bool { return true; }
""",
                    }
                },
            )
            .request(
                line=line(),
                comment="hover 'unsaved1.php' is with respect to disk contents of 'unsaved2.php'",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 1, "character": 39},
                },
                result={
                    "contents": [
                        {"language": "hack", "value": "function unsaved_bar(): int"},
                        "Return type: `int`",
                    ],
                    "range": {
                        "start": {"line": 1, "character": 34},
                        "end": {"line": 1, "character": 45},
                    },
                },
                powered_by="serverless_ide",
            )
            .notification(
                comment="change signature in 'unsaved2.php' to return string",
                method="textDocument/didChange",
                params={
                    "textDocument": {"uri": "${unsaved2_file_uri}", "version": 2},
                    "contentChanges": [
                        {
                            "text": """\
<?hh //strict
function unsaved_bar(): string { return "hello"; }
"""
                        }
                    ],
                },
            )
            .request(
                line=line(),
                comment="this is a dummy hover in 'unsaved2.php' just to ensure its decl is cached",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${unsaved2_file_uri}"},
                    "position": {"line": 0, "character": 0},
                },
                result=None,
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                comment="hover 'unsaved1.php' is still with respect to disk contents of 'unsaved2.php'",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 1, "character": 39},
                },
                result={
                    "contents": [
                        {"language": "hack", "value": "function unsaved_bar(): int"},
                        "Return type: `int`",
                    ],
                    "range": {
                        "start": {"line": 1, "character": 34},
                        "end": {"line": 1, "character": 45},
                    },
                },
                powered_by="serverless_ide",
            )
            .write_to_disk(
                comment="save signature in 'unsaved2' to return string",
                uri=variables["unsaved2_file_uri"],
                contents="""\
<?hh // strict
function unsaved_bar(): string { return "hello"; }
""",
                notify=True,
            )
            .request(
                line=line(),
                comment="hover 'unsaved1.php' gets new disk contents of 'unsaved2.php'",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 1, "character": 39},
                },
                result={
                    "contents": [
                        {"language": "hack", "value": "function unsaved_bar(): string"},
                        "Return type: `string`",
                    ],
                    "range": {
                        "start": {"line": 1, "character": 34},
                        "end": {"line": 1, "character": 45},
                    },
                },
                powered_by="serverless_ide",
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_hover_without_file_open(self) -> None:
        """Hover requires an open file, and survives an IDE restart.

        Hover before didOpen and after didClose returns None; hover while the
        file is open succeeds. In between, $test/shutdownServerlessIde kills
        the serverless IDE, the test answers the resulting failure
        window/showStatus with "Restart Hack IDE", and hover works again
        after the restart. Racy "initializing" status requests are ignored.
        """
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("hover.php"))
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("test_hover_without_file_open"),
                use_serverless_ide=True,
                supports_status=True,
            )
            .ignore_notifications(method="textDocument/publishDiagnostics")
            .ignore_requests(
                comment="Ignore 'initializing...' messages since they're racy",
                method="window/showStatus",
                params={
                    "type": 2,
                    "actions": [{"title": "Restart hh_server"}],
                    "message": "Hack IDE: initializing.\nhh_server: stopped.",
                    "shortMessage": "Hack: initializing",
                },
            )
            .ignore_requests(
                comment="another racy initializing, before hh_server has even responded",
                method="window/showStatus",
                params={
                    "type": 2,
                    "actions": [],
                    "message": "Hack IDE: initializing.",
                    "shortMessage": "Hack: initializing",
                },
            )
            .ignore_requests(
                comment="another racy initialization to ignore, again before hh_server",
                method="window/showStatus",
                params={
                    "type": 3,
                    "actions": [],
                    "message": "Hack IDE: ready.",
                    "shortMessage": "Hack: ready",
                },
            )
            .wait_for_server_request(
                method="window/showStatus",
                params={
                    "actions": [{"title": "Restart hh_server"}],
                    "message": "Hack IDE: ready.\nhh_server: stopped.",
                    "shortMessage": "Hack: ready",
                    "type": 3,
                },
                result=NoResponse(),
            )
            .request(
                line=line(),
                comment="hover before file_open will fail",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 26, "character": 20},
                },
                result=None,
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .request(
                line=line(),
                comment="hover after file_open will succeed",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 26, "character": 20},
                },
                result={"contents": [{"language": "hack", "value": "string"}]},
                powered_by="serverless_ide",
            )
            .request(
                line=line(),
                method="$test/shutdownServerlessIde",
                params={},
                result=None,
                powered_by="serverless_ide",
            )
            .wait_for_server_request(
                method="window/showStatus",
                params={
                    "actions": [
                        {"title": "Restart Hack IDE"},
                        {"title": "Restart hh_server"},
                    ],
                    "message": "Hack IDE has failed. See Output›Hack for details.\nhh_server: stopped.",
                    "shortMessage": "Hack: failed",
                    "type": 1,
                },
                result={"title": "Restart Hack IDE"},
            )
            .wait_for_server_request(
                method="window/showStatus",
                params={
                    "actions": [{"title": "Restart hh_server"}],
                    "message": "Hack IDE: ready.\nhh_server: stopped.",
                    "shortMessage": "Hack: ready",
                    "type": 3,
                },
                result=NoResponse(),
            )
            .request(
                line=line(),
                comment="hover after restart will succeed",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 26, "character": 20},
                },
                result={"contents": [{"language": "hack", "value": "string"}]},
                powered_by="serverless_ide",
            )
            .notification(
                method="textDocument/didClose",
                params={"textDocument": {"uri": "${php_file_uri}"}},
            )
            .request(
                line=line(),
                comment="hover after file_close will fail",
                method="textDocument/hover",
                params={
                    "textDocument": {"uri": "${php_file_uri}"},
                    "position": {"line": 26, "character": 20},
                },
                result=None,
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
    def test_hh_server_status_diagnostic(self) -> None:
        """The hh_server-stopped status diagnostic follows the focused file.

        With hh_server stopped, a severity-1 "hh_server isn't running"
        diagnostic (isStatusFB) is published on the most recently opened
        file: it appears on file1 after didOpen(file1), moves to file2 on
        didOpen(file2), moves back on didClose(file2), and disappears after
        didClose(file1).
        """
        variables = dict(self.prepare_serverless_ide_environment())
        variables.update(self.setup_php_file("unsaved1.php"))
        variables.update(
            {
                "unsaved2_file_uri": self.repo_file_uri("unsaved2.php"),
                "unsaved2_file": self.read_repo_file("unsaved2.php"),
            }
        )
        self.test_driver.stop_hh_server()
        spec = (
            self.initialize_spec(
                LspTestSpec("test_hh_server_status_diagnostic"), use_serverless_ide=True
            )
            .ignore_status_diagnostics(False)
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${php_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${php_file}",
                    }
                },
            )
            .wait_for_notification(
                comment="After didOpen(file1), the hh_server_status diagnostic should appear in file1",
                method="textDocument/publishDiagnostics",
                params={
                    "uri": "${php_file_uri}",
                    "diagnostics": [
                        {
                            "range": {
                                "start": {"line": 0, "character": 0},
                                "end": {"line": 0, "character": 1},
                            },
                            "severity": 1,
                            "source": "hh_server",
                            "message": "hh_server isn't running, so there may be undetected errors. Try `hh` at the command line... hh_server: stopped.",
                            "relatedInformation": [],
                            "relatedLocations": [],
                        }
                    ],
                    "isStatusFB": True,
                },
            )
            .notification(
                method="textDocument/didOpen",
                params={
                    "textDocument": {
                        "uri": "${unsaved2_file_uri}",
                        "languageId": "hack",
                        "version": 1,
                        "text": "${unsaved2_file}",
                    }
                },
            )
            .wait_for_notification(
                comment="After didOpen(file2), the hh_server_status diagnostic should disappear from file1",
                method="textDocument/publishDiagnostics",
                params={
                    "uri": "${php_file_uri}",
                    "diagnostics": [],
                    "isStatusFB": True,
                },
            )
            .wait_for_notification(
                comment="After didOpen(file2), the hh_server_status diagnostic should reappear in file2",
                method="textDocument/publishDiagnostics",
                params={
                    "uri": "${unsaved2_file_uri}",
                    "diagnostics": [
                        {
                            "range": {
                                "start": {"line": 0, "character": 0},
                                "end": {"line": 0, "character": 1},
                            },
                            "severity": 1,
                            "source": "hh_server",
                            "message": "hh_server isn't running, so there may be undetected errors. Try `hh` at the command line... hh_server: stopped.",
                            "relatedInformation": [],
                            "relatedLocations": [],
                        }
                    ],
                    "isStatusFB": True,
                },
            )
            .notification(
                method="textDocument/didClose",
                params={"textDocument": {"uri": "${unsaved2_file_uri}"}},
            )
            .wait_for_notification(
                comment="After didClose(file2), the hh_server_status diagnostic should disappear from file2",
                method="textDocument/publishDiagnostics",
                params={
                    "uri": "${unsaved2_file_uri}",
                    "diagnostics": [],
                    "isStatusFB": True,
                },
            )
            .wait_for_notification(
                comment="After didClose(file2), the hh_server_status diagnostic should reappear in file1",
                method="textDocument/publishDiagnostics",
                params={
                    "uri": "${php_file_uri}",
                    "diagnostics": [
                        {
                            "range": {
                                "start": {"line": 0, "character": 0},
                                "end": {"line": 0, "character": 1},
                            },
                            "severity": 1,
                            "source": "hh_server",
                            "message": "hh_server isn't running, so there may be undetected errors. Try `hh` at the command line... hh_server: stopped.",
                            "relatedInformation": [],
                            "relatedLocations": [],
                        }
                    ],
                    "isStatusFB": True,
                },
            )
            .notification(
                method="textDocument/didClose",
                params={"textDocument": {"uri": "${php_file_uri}"}},
            )
            .wait_for_notification(
                comment="After didClose(file1), the hh_server_status diagnostic should disappear from file1",
                method="textDocument/publishDiagnostics",
                params={
                    "uri": "${php_file_uri}",
                    "diagnostics": [],
                    "isStatusFB": True,
                },
            )
            .request(line=line(), method="shutdown", params={}, result=None)
            .notification(method="exit", params={})
        )
        self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
def _sanitize_gutter_line_numbers(self, s: str) -> str:
gutter_line_number_re = re.compile(r"^[ ]*[0-9]+ \|", re.MULTILINE)
return re.sub(gutter_line_number_re, " XXXX |", s)
def test_lsptestspec_incorrect_request_result(self) -> None:
    """Meta-test of the LspTestSpec harness itself: when a request's actual
    result differs from the spec's expected `result`, run_spec must raise an
    AssertionError containing a diff plus remediation instructions.

    Gutter line numbers in the error are sanitized because they shift
    whenever this file is edited.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    variables.update(self.setup_php_file("hover.php"))
    self.test_driver.stop_hh_server()
    spec = (
        self.initialize_spec(
            LspTestSpec("test_lsptestspec_incorrect_request_result"),
            use_serverless_ide=True,
        )
        .notification(
            method="textDocument/didOpen",
            params={
                "textDocument": {
                    "uri": "${php_file_uri}",
                    "languageId": "hack",
                    "version": 1,
                    "text": "${php_file}",
                }
            },
        )
        .request(
            line=line(),
            comment="hover over function invocation",
            method="textDocument/hover",
            params={
                "textDocument": {"uri": "${php_file_uri}"},
                "position": {"line": 3, "character": 16},
            },
            # Deliberately-wrong expectation: the language server actually
            # returns 'A comment describing b_hover.' here, which is what
            # should trigger the harness failure below.
            result={
                "contents": [
                    {"language": "hack", "value": "int"},
                    "INCORRECT COMMENT HERE",
                ],
                "range": {
                    "start": {"line": 3, "character": 9},
                    "end": {"line": 3, "character": 16},
                },
            },
            powered_by="serverless_ide",
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    try:
        self.run_spec(
            spec,
            variables=variables,
            wait_for_server=False,
            use_serverless_ide=True,
        )
        # run_spec must have raised; reaching here is itself a failure.
        raise AssertionError("Expected an error here")
    except AssertionError as e:
        # Pin the exact harness error text (diff + remediation snippet).
        self.assertEqual(
            self._sanitize_gutter_line_numbers(str(e)),
            """\
Test case test_lsptestspec_incorrect_request_result failed with 1 errors:
Error 1/1:
Description: Request with ID 5 (comment: 'hover over function invocation') \
got an incorrect result:
(+ is expected lines, - is actual lines)
- {'contents': [{'language': 'hack', 'value': 'int'},
+ {'contents': [{'language': 'hack', 'value': 'int'}, 'INCORRECT COMMENT HERE'],
? +++++++++++++++++++++++++++
- 'A comment describing b_hover.'],
'range': {'end': {'character': 16, 'line': 3},
'start': {'character': 9, 'line': 3}}}
Context:
This was the associated request:
hphp/hack/test/integration/test_lsp.py
XXXX | .request(
XXXX | line=line(),
XXXX | comment="hover over function invocation",
XXXX | method="textDocument/hover",
XXXX | params={
XXXX | "textDocument": {"uri": "${php_file_uri}"},
XXXX | "position": {"line": 3, "character": 16},
XXXX | },
XXXX | result={
XXXX | "contents": [
XXXX | {"language": "hack", "value": "int"},
XXXX | "INCORRECT COMMENT HERE",
XXXX | ],
XXXX | "range": {
XXXX | "start": {"line": 3, "character": 9},
XXXX | "end": {"line": 3, "character": 16},
XXXX | },
XXXX | },
XXXX | powered_by="serverless_ide",
XXXX | )
Remediation:
1) If this was unexpected, then the language server is buggy and should be
fixed.
2) If this was expected, you can update your request with the following code to
make it match:
.request(
line=line(),
comment='hover over function invocation',
method='textDocument/hover',
params={'textDocument': {'uri': '${php_file_uri}'}, \
'position': {'line': 3, 'character': 16}},
result={'contents': [{'language': 'hack', 'value': 'int'}, \
'A comment describing b_hover.'], \
'range': {'start': {'line': 3, 'character': 9}, \
'end': {'line': 3, 'character': 16}}},
powered_by='serverless_ide',
)
If you want to examine the raw LSP logs, you can check the `.sent.log` and
`.received.log` files that were generated in the template repo for this test.\
"""
        )
def test_lsptestspec_unexpected_notification(self) -> None:
    """Meta-test of the LspTestSpec harness: a server notification that the
    spec did not wait for (a trailing publishDiagnostics after shutdown)
    must fail the run with a descriptive error and remediation options.
    """
    self.prepare_server_environment()
    variables = self.setup_php_file("didchange.php")
    spec = (
        self.initialize_spec(LspTestSpec("did_change"), use_serverless_ide=False)
        .wait_for_hh_server_ready()
        .notification(
            method="textDocument/didOpen",
            params={
                "textDocument": {
                    "uri": "${php_file_uri}",
                    "languageId": "hack",
                    "version": 1,
                    "text": "${php_file}",
                }
            },
        )
        .notification(
            # Introduce a syntax error so diagnostics get published.
            method="textDocument/didChange",
            params={
                "textDocument": {"uri": "${php_file_uri}"},
                "contentChanges": [
                    {
                        "range": {
                            "start": {"line": 7, "character": 11},
                            "end": {"line": 7, "character": 12},
                        },
                        "text": "a",
                    }
                ],
            },
        )
        .wait_for_notification(
            method="textDocument/publishDiagnostics",
            params={
                "uri": "${php_file_uri}",
                "diagnostics": [
                    {
                        "range": {
                            "start": {"line": 7, "character": 11},
                            "end": {"line": 7, "character": 11},
                        },
                        "severity": 1,
                        "code": 1002,
                        "source": "Hack",
                        "message": "A semicolon ; is expected here.",
                        "relatedLocations": [],
                        "relatedInformation": [],
                    }
                ],
            },
        )
        # No wait for the final empty-diagnostics notification: that is the
        # "unexpected notification" this meta-test provokes.
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    try:
        self.run_spec(
            spec, variables, wait_for_server=True, use_serverless_ide=False
        )
        # run_spec must have raised; reaching here is itself a failure.
        raise AssertionError("Expected an error here")
    except AssertionError as e:
        # Pin the exact harness error text.
        self.assertEqual(
            self._sanitize_gutter_line_numbers(str(e)),
            """\
Test case did_change failed with 1 errors:
Error 1/1:
Description: An unexpected notification of type \
'textDocument/publishDiagnostics' was sent by the language server.
Here is the notification payload:
{'jsonrpc': '2.0',
'method': 'textDocument/publishDiagnostics',
'params': {'diagnostics': [],
'uri': '__PHP_FILE_URI__'}}
Context:
This was the most recent request issued from the language client before it
received the notification:
hphp/hack/test/integration/test_lsp.py
XXXX | .request(line=line(), method="shutdown", params={}, result=None)
Remediation:
1) If this was unexpected, then the language server is buggy and should be
fixed.
2) If all notifications of type 'textDocument/publishDiagnostics' should be \
ignored, add this directive
anywhere in your test:
.ignore_notifications(method='textDocument/publishDiagnostics')
3) If this single instance of the notification was expected, add this directive
to your test to wait for it before proceeding:
.wait_for_notification(
method='textDocument/publishDiagnostics',
params={'uri': '${php_file_uri}', 'diagnostics': []},
)
If you want to examine the raw LSP logs, you can check the `.sent.log` and
`.received.log` files that were generated in the template repo for this test.\
"""
            # There's an instance of a literal `${php_file_uri}` in there
            # which we don't want to change, so use a different name than
            # that one.
            .replace("__PHP_FILE_URI__", variables["php_file_uri"]),
        )
def test_serverless_ide_highlight(self) -> None:
    """textDocument/documentHighlight should return the symbol's occurrence
    range, served by serverless IDE while hh_server is stopped.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    variables.update(self.setup_php_file("highlight.php"))
    # With hh_server stopped, the response must come from serverless IDE
    # (asserted via powered_by below).
    self.test_driver.stop_hh_server()
    spec = (
        self.initialize_spec(
            LspTestSpec("serverless_ide_highlight"), use_serverless_ide=True
        )
        .notification(
            method="textDocument/didOpen",
            params={
                "textDocument": {
                    "uri": "${php_file_uri}",
                    "languageId": "hack",
                    "version": 1,
                    "text": "${php_file}",
                }
            },
        )
        .request(
            line=line(),
            comment="document highlight, id 2",
            method="textDocument/documentHighlight",
            params={
                "textDocument": {"uri": "${php_file_uri}"},
                "position": {"line": 3, "character": 10},
            },
            result=[
                {
                    "range": {
                        "start": {"line": 3, "character": 9},
                        "end": {"line": 3, "character": 20},
                    }
                }
            ],
            powered_by="serverless_ide",
        )
        .request(
            line=line(),
            comment="shutdown, id 3",
            method="shutdown",
            params={},
            result=None,
        )
    )
    self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
def test_serverless_ide_coverage(self) -> None:
    """textDocument/typeCoverage should report uncovered ranges and a
    default message, served by serverless IDE while hh_server is stopped.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    variables.update(self.setup_php_file("coverage.php"))
    # Stopped hh_server forces the serverless IDE code path.
    self.test_driver.stop_hh_server()
    spec = (
        self.initialize_spec(
            LspTestSpec("serverless_ide_coverage"), use_serverless_ide=True
        )
        .notification(
            method="textDocument/didOpen",
            params={
                "textDocument": {
                    "uri": "${php_file_uri}",
                    "languageId": "hack",
                    "version": 1,
                    "text": "${php_file}",
                }
            },
        )
        .request(
            line=line(),
            comment="Check type coverage",
            method="textDocument/typeCoverage",
            params={"textDocument": {"uri": "${php_file_uri}"}},
            result={
                "coveredPercent": 0,
                "uncoveredRanges": [
                    {
                        "range": {
                            "start": {"line": 3, "character": 12},
                            "end": {"line": 3, "character": 17},
                        }
                    },
                    {
                        "range": {
                            "start": {"line": 3, "character": 8},
                            "end": {"line": 3, "character": 10},
                        }
                    },
                    {
                        "range": {
                            "start": {"line": 3, "character": 2},
                            "end": {"line": 3, "character": 5},
                        }
                    },
                ],
                "defaultMessage": "Un-type checked code. Consider adding type annotations.",
            },
            powered_by="serverless_ide",
        )
        .request(
            line=line(),
            comment="Shutdown",
            method="shutdown",
            params={},
            result=None,
        )
    )
    self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
def test_status_stopped(self) -> None:
    """With hh_server stopped and status support enabled (serverless IDE
    off), the client should receive a window/showStatus 'stopped' request
    offering a restart action.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    variables.update(self.setup_php_file("hover.php"))
    self.test_driver.stop_hh_server()
    spec = (
        self.initialize_spec(
            LspTestSpec("status_stopped"),
            use_serverless_ide=False,
            supports_status=True,
        )
        .wait_for_server_request(
            method="window/showStatus",
            params={
                "shortMessage": "Hack: stopped",
                "message": "hh_server: stopped.",
                "actions": [{"title": "Restart hh_server"}],
                "type": 1,
            },
            # NoResponse: the dialog is left unanswered on purpose.
            result=NoResponse(),
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=False)
def test_status_running(self) -> None:
    """With hh_server running and status support enabled (serverless IDE
    off), the client should eventually see a 'ready' window/showStatus.
    Transitional 'initializing' statuses are ignored because their timing
    is racy.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    variables.update(self.setup_php_file("hover.php"))
    spec = (
        self.initialize_spec(
            LspTestSpec("status_running"),
            use_serverless_ide=False,
            supports_status=True,
        )
        .ignore_requests(
            comment="Ignore initializing... requests since they're racy",
            method="window/showStatus",
            params={
                "type": 2,
                "shortMessage": "Hack: initializing",
                "message": "hh_server initializing: processing [<test> seconds]",
                "actions": [],
            },
        )
        .wait_for_server_request(
            method="window/showStatus",
            params={"actions": [], "message": "hh_server: ready.", "type": 3},
            result=NoResponse(),
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    self.run_spec(spec, variables, wait_for_server=True, use_serverless_ide=False)
def test_serverless_ide_status_stopped(self) -> None:
    """With serverless IDE on and hh_server stopped, the combined status
    should settle on 'Hack IDE: ready / hh_server: stopped'. Several racy
    intermediate statuses are explicitly ignored.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    variables.update(self.setup_php_file("hover.php"))
    self.test_driver.stop_hh_server()
    spec = (
        self.initialize_spec(
            LspTestSpec("serverless_ide_status_stopped"),
            use_serverless_ide=True,
            supports_status=True,
        )
        .ignore_requests(
            comment="ignore initializing... messages since they're kind of racy",
            method="window/showStatus",
            params={
                "type": 2,
                "actions": [{"title": "Restart hh_server"}],
                "message": "Hack IDE: initializing.\nhh_server: stopped.",
                "shortMessage": "Hack: initializing",
            },
        )
        .ignore_requests(
            comment="another racy initialization to ignore, before hh_server has even reported its status",
            method="window/showStatus",
            params={
                "type": 2,
                "actions": [],
                "message": "Hack IDE: initializing.",
                "shortMessage": "Hack: initializing",
            },
        )
        .ignore_requests(
            comment="another racy initialization to ignore, again before hh_server",
            method="window/showStatus",
            params={
                "type": 3,
                "actions": [],
                "message": "Hack IDE: ready.",
                "shortMessage": "Hack: ready",
            },
        )
        .wait_for_server_request(
            method="window/showStatus",
            params={
                "message": "Hack IDE: ready.\nhh_server: stopped.",
                "shortMessage": "Hack: ready",
                "actions": [{"title": "Restart hh_server"}],
                "type": 3,
            },
            result=NoResponse(),
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
def test_serverless_ide_status_restart(self) -> None:
    """Kill the serverless IDE process mid-session, answer the resulting
    'failed' status dialog with 'Restart Hack IDE', and verify the status
    returns to ready. Racy initializing statuses are ignored.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    variables.update(self.setup_php_file("hover.php"))
    spec = (
        self.initialize_spec(
            LspTestSpec("serverless_ide_status_restart"),
            use_serverless_ide=True,
            supports_status=True,
        )
        .ignore_requests(
            comment="Ignore initializing messages since they're racy",
            method="window/showStatus",
            params={
                "type": 2,
                "actions": [],
                "message": "Hack IDE: initializing.\nhh_server initializing: processing [<test> seconds]",
                "shortMessage": "Hack: initializing",
            },
        )
        .ignore_requests(
            comment="Another form of initializing to ignore",
            method="window/showStatus",
            params={
                "type": 2,
                "actions": [],
                "message": "Hack IDE: initializing.\nhh_server: ready.",
                "shortMessage": "Hack: initializing",
            },
        )
        .ignore_requests(
            comment="Another form of initializing to ignore before we've even heard the first peep from hh_server",
            method="window/showStatus",
            params={
                "type": 2,
                "actions": [],
                "message": "Hack IDE: initializing.",
                "shortMessage": "Hack: initializing",
            },
        )
        .ignore_requests(
            comment="another racy initialization to ignore, again before hh_server",
            method="window/showStatus",
            params={
                "type": 3,
                "actions": [],
                "message": "Hack IDE: ready.",
                "shortMessage": "Hack: ready",
            },
        )
        .wait_for_server_request(
            method="window/showStatus",
            params={
                "actions": [],
                "message": "Hack IDE: ready.\nhh_server: ready.",
                "shortMessage": "Hack: ready",
                "type": 3,
            },
            result=NoResponse(),
        )
        # Test-only request that kills the serverless IDE process.
        .request(
            line=line(),
            method="$test/shutdownServerlessIde",
            params={},
            result=None,
            powered_by="serverless_ide",
        )
        .wait_for_server_request(
            method="window/showStatus",
            params={
                "actions": [{"title": "Restart Hack IDE"}],
                "message": "Hack IDE has failed. See Output›Hack for details.\nhh_server: ready.",
                "shortMessage": "Hack: failed",
                "type": 1,
            },
            # Answer the dialog: click 'Restart Hack IDE'.
            result={"title": "Restart Hack IDE"},
        )
        .wait_for_server_request(
            method="window/showStatus",
            params={
                "actions": [],
                "message": "Hack IDE: ready.\nhh_server: ready.",
                "shortMessage": "Hack: ready",
                "type": 3,
            },
            result=NoResponse(),
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    self.run_spec(spec, variables, wait_for_server=True, use_serverless_ide=True)
def test_serverless_ide_failed_to_load_saved_state(self) -> None:
    """Point the serverless IDE at a nonexistent naming-table saved state
    and verify it reports failure: a window/logMessage plus a 'failed'
    window/showStatus. Racy intermediate statuses are ignored.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    variables.update(self.setup_php_file("hover.php"))
    assert "naming_table_saved_state_path" in variables
    # Force the saved-state load to fail.
    variables["naming_table_saved_state_path"] = "/tmp/nonexistent"
    spec = (
        self.initialize_spec(
            LspTestSpec("serverless_ide_status_failed_to_load_saved_state"),
            use_serverless_ide=True,
            supports_status=True,
            supports_init=True,
        )
        .ignore_requests(
            comment="Ignore initializing since they're kind of racy",
            method="window/showStatus",
            params={
                "type": 2,
                "actions": [],
                "message": "Hack IDE: initializing.\nhh_server initializing: processing [<test> seconds]",
                "shortMessage": "Hack: initializing",
            },
        )
        .ignore_requests(
            comment="Ignore another form of initializing",
            method="window/showStatus",
            params={
                "type": 2,
                "actions": [],
                "message": "Hack IDE: initializing.\nhh_server: ready.",
                "shortMessage": "Hack: initializing",
            },
        )
        .ignore_requests(
            comment="Ignore another form of initializing, from before we've even heard the first peep out of hh_server",
            method="window/showStatus",
            params={
                "type": 2,
                "actions": [],
                "message": "Hack IDE: initializing.",
                "shortMessage": "Hack: initializing",
            },
        )
        .ignore_requests(
            comment="Ignore another form of initializing, again before hh_server",
            method="window/showStatus",
            params={
                "type": 1,
                "actions": [{"title": "Restart Hack IDE"}],
                "message": "Hack IDE has failed. See Output›Hack for details.",
                "shortMessage": "Hack: failed",
            },
        )
        .wait_for_notification(
            method="window/logMessage",
            params={
                "type": 1,
                "message": "Hack IDE has failed.\nThis is unexpected.\nPlease file a bug within your IDE.\nMore details: http://dummy/HH_TEST_MODE",
            },
        )
        .wait_for_server_request(
            method="window/showStatus",
            params={
                "actions": [{"title": "Restart Hack IDE"}],
                "message": "Hack IDE has failed. See Output›Hack for details.\nhh_server: ready.",
                "shortMessage": "Hack: failed",
                "type": 1,
            },
            result=NoResponse(),
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    self.run_spec(spec, variables, wait_for_server=True, use_serverless_ide=True)
def test_workspace_symbol(self) -> None:
    """workspace/symbol (hh_server-backed): a namespaced query returns the
    single namespaced function; a bare prefix query returns matches from
    multiple namespaces.
    """
    self.prepare_server_environment()
    variables = self.setup_php_file("didchange.php")
    spec = (
        self.initialize_spec(
            LspTestSpec("test_workspace_symbol"), use_serverless_ide=False
        )
        .wait_for_hh_server_ready()
        .request(
            line=line(),
            comment="Look up symbols",
            method="workspace/symbol",
            params={"query": "TestNS\\test"},
            result=[
                {
                    "name": "TestNS\\test_func",
                    "kind": 12,
                    "location": {
                        "uri": "file://${root_path}/completion_extras_namespace.php",
                        "range": {
                            "start": {"line": 4, "character": 9},
                            "end": {"line": 4, "character": 25},
                        },
                    },
                }
            ],
        )
        .request(
            line=line(),
            comment="Look up symbols starting with 'test_f' within multiple namespaces",
            method="workspace/symbol",
            params={"query": "test_f"},
            result=[
                {
                    "name": "test_function",
                    "kind": 12,
                    "location": {
                        "uri": "file://${root_path}/completion.php",
                        "range": {
                            "start": {"line": 7, "character": 9},
                            "end": {"line": 7, "character": 22},
                        },
                    },
                },
                {
                    "name": "TestNS\\test_func",
                    "kind": 12,
                    "location": {
                        "uri": "file://${root_path}/completion_extras_namespace.php",
                        "range": {
                            "start": {"line": 4, "character": 9},
                            "end": {"line": 4, "character": 25},
                        },
                    },
                },
            ],
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    self.run_spec(spec, variables, wait_for_server=True, use_serverless_ide=False)
def test_serverless_ide_during_hh_server_restart(self) -> None:
    """Edits made while hh_server is down must still be diagnosed once it
    (re)starts: a bad edit produces an 'Invalid return type' diagnostic on
    first start, and again after a stop/start cycle once hh_server learns
    of the unsaved edit.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    variables.update(self.setup_php_file("didchange.php"))
    spec = (
        self.initialize_spec(
            LspTestSpec("test_serverless_ide_during_hh_server_restart"),
            use_serverless_ide=True,
        )
        .notification(
            method="textDocument/didOpen",
            params={
                "textDocument": {
                    "uri": "${php_file_uri}",
                    "languageId": "hack",
                    "version": 1,
                    "text": "${php_file}",
                }
            },
        )
        .notification(
            comment="Send a 'didChange' notification before HH Server is functional.",
            method="textDocument/didChange",
            params={
                "textDocument": {"uri": "${php_file_uri}"},
                "contentChanges": [
                    {
                        "range": {
                            "start": {"line": 7, "character": 9},
                            "end": {"line": 7, "character": 11},
                        },
                        # Replace an int expression with a string, breaking
                        # the function's declared return type.
                        "text": "'foo'",
                    }
                ],
            },
        )
        .start_hh_server("Start HH Server; should detect the bad edit")
        .wait_for_notification(
            method="textDocument/publishDiagnostics",
            params={
                "uri": "${php_file_uri}",
                "diagnostics": [
                    {
                        "code": 4110,
                        "message": "Invalid return type",
                        "range": {
                            "end": {"character": 14, "line": 7},
                            "start": {"character": 9, "line": 7},
                        },
                        "relatedInformation": [
                            {
                                "location": {
                                    "range": {
                                        "end": {"character": 27, "line": 6},
                                        "start": {"character": 24, "line": 6},
                                    },
                                    "uri": "${php_file_uri}",
                                },
                                "message": "Expected int",
                            },
                            {
                                "location": {
                                    "range": {
                                        "end": {"character": 14, "line": 7},
                                        "start": {"character": 9, "line": 7},
                                    },
                                    "uri": "${php_file_uri}",
                                },
                                "message": "But got string",
                            },
                        ],
                        "relatedLocations": [
                            {
                                "location": {
                                    "range": {
                                        "end": {"character": 27, "line": 6},
                                        "start": {"character": 24, "line": 6},
                                    },
                                    "uri": "${php_file_uri}",
                                },
                                "message": "Expected int",
                            },
                            {
                                "location": {
                                    "range": {
                                        "end": {"character": 14, "line": 7},
                                        "start": {"character": 9, "line": 7},
                                    },
                                    "uri": "${php_file_uri}",
                                },
                                "message": "But got string",
                            },
                        ],
                        "severity": 1,
                        "source": "Hack",
                    }
                ],
            },
        )
        .stop_hh_server("Shutdown HH Server")
        .start_hh_server("Restart HH Server")
        .wait_for_notification(
            comment="On startup it thinks everything is okay ...",
            method="textDocument/publishDiagnostics",
            params={"uri": "${php_file_uri}", "diagnostics": []},
        )
        .wait_for_notification(
            comment="But then hh_server sends a hello message and it gets the edited files, which leads it to see the problem.",
            method="textDocument/publishDiagnostics",
            params={
                "uri": "${php_file_uri}",
                "diagnostics": [
                    {
                        "code": 4110,
                        "message": "Invalid return type",
                        "range": {
                            "end": {"character": 14, "line": 7},
                            "start": {"character": 9, "line": 7},
                        },
                        "relatedInformation": [
                            {
                                "location": {
                                    "range": {
                                        "end": {"character": 27, "line": 6},
                                        "start": {"character": 24, "line": 6},
                                    },
                                    "uri": "${php_file_uri}",
                                },
                                "message": "Expected int",
                            },
                            {
                                "location": {
                                    "range": {
                                        "end": {"character": 14, "line": 7},
                                        "start": {"character": 9, "line": 7},
                                    },
                                    "uri": "${php_file_uri}",
                                },
                                "message": "But got string",
                            },
                        ],
                        "relatedLocations": [
                            {
                                "location": {
                                    "range": {
                                        "end": {"character": 27, "line": 6},
                                        "start": {"character": 24, "line": 6},
                                    },
                                    "uri": "${php_file_uri}",
                                },
                                "message": "Expected int",
                            },
                            {
                                "location": {
                                    "range": {
                                        "end": {"character": 14, "line": 7},
                                        "start": {"character": 9, "line": 7},
                                    },
                                    "uri": "${php_file_uri}",
                                },
                                "message": "But got string",
                            },
                        ],
                        "severity": 1,
                        "source": "Hack",
                    }
                ],
            },
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    self.run_spec(spec, variables, wait_for_server=True, use_serverless_ide=True)
def test_serverless_ide_naming_error1(self) -> None:
    """Serverless IDE hover behavior through a naming-error lifecycle:
    undefined symbol -> defined once -> duplicate definition -> duplicate
    resolved by deleting the first definition. Hover must keep working at
    every stage, preferring the first-seen definition while duplicated.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    variables.update(self.setup_php_file("didchange.php"))
    variables.update(
        {
            "main_file": self.repo_file("main.php"),
            "main_file_contents": """\
<?hh
function main(): int {
return aaa();
}
""",
            "file_a": self.repo_file("a.php"),
            "file_b": self.repo_file("b.php"),
        }
    )
    spec = (
        self.initialize_spec(
            LspTestSpec("serverless_ide_naming_error1"), use_serverless_ide=True
        )
        .write_to_disk(
            uri="${main_file}", contents="${main_file_contents}", notify=True
        )
        .notification(
            method="textDocument/didOpen",
            params={
                "textDocument": {
                    "uri": "${main_file}",
                    "languageId": "hack",
                    "version": 1,
                    "text": "${main_file_contents}",
                }
            },
        )
        .request(
            line=line(),
            comment="Ensure that hover over `aaa` works even when the name is not yet defined",
            method="textDocument/hover",
            params={
                "textDocument": {"uri": "${main_file}"},
                "position": {"line": 2, "character": 13},
            },
            # '_' is the placeholder type shown while `aaa` is undefined.
            result={
                "contents": [{"language": "hack", "value": "_"}],
                "range": {
                    "start": {"line": 2, "character": 11},
                    "end": {"line": 2, "character": 14},
                },
            },
            powered_by="serverless_ide",
        )
        .write_to_disk(
            comment="create file A",
            uri="${file_a}",
            contents="""\
<?hh
function aaa(): int {
return 1;
}
""",
            notify=True,
        )
        .request(
            line=line(),
            comment="Ensure that hover over `aaa` works when there are no naming errors",
            method="textDocument/hover",
            params={
                "textDocument": {"uri": "${main_file}"},
                "position": {"line": 2, "character": 13},
            },
            result={
                "contents": [
                    {"language": "hack", "value": "function aaa(): int"},
                    "Return type: `int`",
                ],
                "range": {
                    "start": {"line": 2, "character": 11},
                    "end": {"line": 2, "character": 14},
                },
            },
            powered_by="serverless_ide",
        )
        .write_to_disk(
            comment="create file B",
            uri="${file_b}",
            contents="""\
<?hh
function aaa(): string {
return "foo";
}
""",
            notify=True,
        )
        .request(
            line=line(),
            comment="Ensure that hover over `aaa` works even when there is a duplicate name",
            method="textDocument/hover",
            params={
                "textDocument": {"uri": "${main_file}"},
                "position": {"line": 2, "character": 13},
            },
            # Still resolves to file A's int version despite the duplicate.
            result={
                "contents": [
                    {"language": "hack", "value": "function aaa(): int"},
                    "Return type: `int`",
                ],
                "range": {
                    "start": {"line": 2, "character": 11},
                    "end": {"line": 2, "character": 14},
                },
            },
            powered_by="serverless_ide",
        )
        .write_to_disk(
            comment="delete file A", uri="${file_a}", contents=None, notify=True
        )
        .request(
            line=line(),
            comment="Now that we've fixed the error, hover should work.",
            method="textDocument/hover",
            params={
                "textDocument": {"uri": "${main_file}"},
                "position": {"line": 2, "character": 13},
            },
            # With file A gone, file B's string version wins.
            result={
                "contents": [
                    {"language": "hack", "value": "function aaa(): string"},
                    "Return type: `string`",
                ],
                "range": {
                    "start": {"line": 2, "character": 11},
                    "end": {"line": 2, "character": 14},
                },
            },
            powered_by="serverless_ide",
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
def test_serverless_ide_naming_error2(self) -> None:
    """Transient duplicate name: copy a declaration file then delete the
    copy; hover over a caller of the declaration must still work afterward.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    self.test_driver.stop_hh_server()
    variables.update(self.setup_php_file("naming_error_caller.php"))
    variables.update(
        {
            "contents": self.read_repo_file("naming_error_declaration.php"),
            "original": self.repo_file("naming_error_declaration.php"),
            "copy": self.repo_file("naming_error_copy.php"),
        }
    )
    spec = (
        self.initialize_spec(
            LspTestSpec("serverless_ide_naming_error2"), use_serverless_ide=True
        )
        .notification(
            method="textDocument/didOpen",
            params={
                "textDocument": {
                    "uri": "${php_file_uri}",
                    "languageId": "hack",
                    "version": 1,
                    "text": "${php_file}",
                }
            },
        )
        .write_to_disk(
            comment="create copy",
            uri="${copy}",
            contents="${contents}",
            notify=True,
        )
        .write_to_disk(
            comment="delete copy", uri="${copy}", contents=None, notify=True
        )
        .request(
            line=line(),
            comment="hover should work fine after making copy then deleting copy.",
            method="textDocument/hover",
            params={
                "textDocument": {"uri": "${php_file_uri}"},
                "position": {"line": 3, "character": 15},
            },
            result={
                "contents": [
                    {
                        "language": "hack",
                        "value": "function naming_error_declaration(): void",
                    },
                    "Return type: `void`",
                ],
                "range": {
                    "start": {"line": 3, "character": 2},
                    "end": {"line": 3, "character": 26},
                },
            },
            powered_by="serverless_ide",
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
def test_serverless_ide_naming_error3(self) -> None:
    """Moved declaration: copy a declaration file then delete the ORIGINAL
    (unlike naming_error2, which deletes the copy); hover over a caller
    must still resolve via the surviving copy.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    self.test_driver.stop_hh_server()
    variables.update(self.setup_php_file("naming_error_caller.php"))
    variables.update(
        {
            "contents": self.read_repo_file("naming_error_declaration.php"),
            "original": self.repo_file("naming_error_declaration.php"),
            "copy": self.repo_file("naming_error_copy.php"),
        }
    )
    spec = (
        self.initialize_spec(
            LspTestSpec("serverless_ide_naming_error3"), use_serverless_ide=True
        )
        .notification(
            method="textDocument/didOpen",
            params={
                "textDocument": {
                    "uri": "${php_file_uri}",
                    "languageId": "hack",
                    "version": 1,
                    "text": "${php_file}",
                }
            },
        )
        .write_to_disk(
            comment="create copy",
            uri="${copy}",
            contents="${contents}",
            notify=True,
        )
        .write_to_disk(
            comment="delete original", uri="${original}", contents=None, notify=True
        )
        .request(
            line=line(),
            comment="hover should work fine after making copy then deleting original.",
            method="textDocument/hover",
            params={
                "textDocument": {"uri": "${php_file_uri}"},
                "position": {"line": 3, "character": 15},
            },
            result={
                "contents": [
                    {
                        "language": "hack",
                        "value": "function naming_error_declaration(): void",
                    },
                    "Return type: `void`",
                ],
                "range": {
                    "start": {"line": 3, "character": 2},
                    "end": {"line": 3, "character": 26},
                },
            },
            powered_by="serverless_ide",
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
def test_serverless_ide_requests_before_init(self) -> None:
    """Requests issued before serverless IDE finishes initializing: hover
    fails (returns None) but documentSymbol succeeds; after the init
    telemetry event arrives, hover succeeds too.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    variables["root_path"] = self.test_driver.repo_dir
    self.test_driver.stop_hh_server()
    spec = (
        self.initialize_spec(
            LspTestSpec("test_serverless_ide_requests_before_init"),
            use_serverless_ide=True,
            supports_status=True,
            supports_init=True,
        )
        .ignore_notifications(method="textDocument/publishDiagnostics")
        .ignore_requests(
            comment="Ignore 'initializing...' messages since they're racy",
            method="window/showStatus",
            params={
                "type": 2,
                "actions": [{"title": "Restart hh_server"}],
                "message": "Hack IDE: initializing.\nhh_server: stopped.",
                "shortMessage": "Hack: initializing",
            },
        )
        .ignore_requests(
            comment="another racy initialization, before we've yet heard from hh_server",
            method="window/showStatus",
            params={
                "type": 2,
                "actions": [],
                "message": "Hack IDE: initializing.",
                "shortMessage": "Hack: initializing",
            },
        )
        .ignore_requests(
            comment="another racy initialization, if HackIDE is done before hh_server has yet sent status",
            method="window/showStatus",
            params={
                "type": 3,
                "actions": [],
                "message": "Hack IDE: ready.",
                "shortMessage": "Hack: ready",
            },
        )
        # wait=False: race the disk write against IDE initialization.
        .write_to_disk(
            notify=True,
            wait=False,
            uri="file://${root_path}/beforeInit1.php",
            contents="<?hh // strict\nfunction beforeInit1(): int {\n return 42;\n}\n",
        )
        .notification(
            comment="open a file before init has finished",
            method="textDocument/didOpen",
            params={
                "textDocument": {
                    "uri": "file://${root_path}/beforeInit2.php",
                    "languageId": "hack",
                    "version": 1,
                    "text": "<?hh // strict\nfunction beforeInit2(): void {\n $foo = beforeInit1();\n}\n",
                }
            },
        )
        .request(
            line=line(),
            comment="hover before init will fail",
            method="textDocument/hover",
            params={
                "textDocument": {"uri": "file://${root_path}/beforeInit2.php"},
                "position": {"line": 2, "character": 4},
            },
            result=None,
        )
        .request(
            line=line(),
            comment="documentSymbol before init will succeed",
            method="textDocument/documentSymbol",
            params={"textDocument": {"uri": "file://${root_path}/beforeInit2.php"}},
            result=[
                {
                    "name": "beforeInit2",
                    "kind": 12,
                    "location": {
                        "uri": "file://${root_path}/beforeInit2.php",
                        "range": {
                            "start": {"line": 1, "character": 0},
                            "end": {"line": 3, "character": 1},
                        },
                    },
                }
            ],
            powered_by="serverless_ide",
        )
        .wait_for_notification(
            comment="wait for sIDE to init",
            method="telemetry/event",
            params={"type": 4, "message": "[client-ide] Finished init: ok"},
        )
        .wait_for_server_request(
            method="window/showStatus",
            params={
                "actions": [{"title": "Restart hh_server"}],
                "message": "Hack IDE: ready.\nhh_server: stopped.",
                "shortMessage": "Hack: ready",
                "type": 3,
            },
            result=NoResponse(),
        )
        .request(
            line=line(),
            comment="hover after init will succeed",
            method="textDocument/hover",
            params={
                "textDocument": {"uri": "file://${root_path}/beforeInit2.php"},
                "position": {"line": 2, "character": 4},
            },
            result={
                "contents": [{"language": "hack", "value": "int"}],
                "range": {
                    "start": {"line": 2, "character": 2},
                    "end": {"line": 2, "character": 6},
                },
            },
            powered_by="serverless_ide",
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
def test_serverless_ide_workspace_symbol(self) -> None:
    """workspace/symbol served by serverless IDE (hh_server stopped): a
    global class-name query and a 'Class::' member query both resolve from
    the local naming table / sqlite index.
    """
    variables = dict(self.prepare_serverless_ide_environment())
    variables["root_path"] = self.test_driver.repo_dir
    self.test_driver.stop_hh_server()
    spec = (
        self.initialize_spec(
            LspTestSpec("serverless_ide_workspace_symbol"), use_serverless_ide=True
        )
        .request(
            line=line(),
            comment="workspace symbol call, global, powered by sqlite (generated during serverless-ide-init)",
            method="workspace/symbol",
            params={"query": "TakesString"},
            result=[
                {
                    "name": "TakesString",
                    "kind": 5,
                    "location": {
                        "uri": "file://${root_path}/definition.php",
                        "range": {
                            "start": {"line": 36, "character": 6},
                            "end": {"line": 36, "character": 17},
                        },
                    },
                }
            ],
            powered_by="serverless_ide",
        )
        .request(
            line=line(),
            comment="workspace symbol call, member (derived from naming-table)",
            method="workspace/symbol",
            params={"query": "TakesString::"},
            result=[
                {
                    "name": "__construct",
                    "kind": 6,
                    "location": {
                        "uri": "file://${root_path}/definition.php",
                        "range": {
                            "start": {"line": 37, "character": 18},
                            "end": {"line": 37, "character": 29},
                        },
                    },
                }
            ],
            powered_by="serverless_ide",
        )
        .request(line=line(), method="shutdown", params={}, result=None)
        .notification(method="exit", params={})
    )
    self.run_spec(spec, variables, wait_for_server=False, use_serverless_ide=True)
| 41.602701
| 153
| 0.366807
| 18,254
| 280,319
| 5.487674
| 0.052043
| 0.020964
| 0.022861
| 0.028681
| 0.850058
| 0.826988
| 0.806862
| 0.793176
| 0.772921
| 0.757677
| 0
| 0.014247
| 0.509741
| 280,319
| 6,737
| 154
| 41.608876
| 0.714624
| 0.010142
| 0
| 0.651816
| 0
| 0.001087
| 0.251509
| 0.037953
| 0
| 0
| 0
| 0.000148
| 0.004967
| 1
| 0.010556
| false
| 0
| 0.002794
| 0.001087
| 0.017852
| 0.000155
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a296a773bafc9087dba6705ad7d749540a9c690
| 10,584
|
py
|
Python
|
frozen/mcudev/fsmc.py
|
seelpro/BLACK_F407ZG
|
d3166a2a5c0d59a9a9e534c9a57f839028badde4
|
[
"MIT"
] | null | null | null |
frozen/mcudev/fsmc.py
|
seelpro/BLACK_F407ZG
|
d3166a2a5c0d59a9a9e534c9a57f839028badde4
|
[
"MIT"
] | null | null | null |
frozen/mcudev/fsmc.py
|
seelpro/BLACK_F407ZG
|
d3166a2a5c0d59a9a9e534c9a57f839028badde4
|
[
"MIT"
] | null | null | null |
import machine
import stm
# --- RCC clock-enable bit masks (AHB3ENR / AHB1ENR) ---
RCC_AHB3ENR_FSMCEN = const(0x01)   # FSMC controller clock enable
RCC_AHB1ENR_GPIODEN = const(0x08)  # GPIOD port clock enable
RCC_AHB1ENR_GPIOEEN = const(0x10)  # GPIOE port clock enable
RCC_AHB1ENR_GPIOFEN = const(0x20)  # GPIOF port clock enable
RCC_AHB1ENR_GPIOGEN = const(0x40)  # GPIOG port clock enable
# --- FSMC register block base address ---
FSMC_Banks_base = const(0xA0000000)
# Per-bank register offsets: BCR = control register, BTR = timing register.
FSMC_BCR_bank1_offset = const(0x0)
FSMC_BTR_bank1_offset = const(0x4)
FSMC_BCR_bank2_offset = const(0x8)
FSMC_BTR_bank2_offset = const(0xC)
FSMC_BCR_bank3_offset = const(0x10)
FSMC_BTR_bank3_offset = const(0x14)
FSMC_BCR_bank4_offset = const(0x18)
FSMC_BTR_bank4_offset = const(0x1c)
# --- BCR bit fields used below ---
FSMC_BCRx_MWID_8bit = const(0x00000000)   # memory data width: 8 bit
FSMC_BCRx_MWID_16bit = const(0x00000010)  # memory data width: 16 bit
FSMC_BCRx_WREN = const(0x00001000)        # write enable
FSMC_BCRx_MBKEN = const(0x1)              # memory bank enable
# --- Memory-mapped LCD command/data addresses ---
# The REG/RAM pair differ by one address line, presumably wired to the
# display's register-select (RS) pin -- TODO confirm against board schematic.
LCD_REG = const(0x60000000)     # bank 1 region: LCD command address
LCD_RAM = const(0x60080000)     # bank 1 region: LCD data address
LCD_Zx_REG = const(0x6C000000)  # bank 4 region variant (Zx boards): command
LCD_Zx_RAM = const(0x6C000080)  # bank 4 region variant (Zx boards): data
@micropython.viper
def _mem_SET(addr,data):
    # Write `data` to the 32-bit memory-mapped register at `addr`.
    # The intermediate local is kept deliberately; presumably it helps the
    # viper emitter with operand typing -- confirm before simplifying.
    _tmp = data
    machine.mem32[addr] = _tmp
@micropython.viper
def _mem_OR(addr,data):
    # Read-modify-write: OR `data` into the 32-bit register at `addr`
    # (sets the bits in `data`, leaves all other bits unchanged).
    _tmp = data
    machine.mem32[addr] |= _tmp
@micropython.viper
def _mem_AND_OR(addr,anddata,data):
    # Two-step read-modify-write on the 32-bit register at `addr`:
    # first clear the bits that are 0 in `anddata`, then set the bits
    # in `data`.  Used below to update selected bit fields of a GPIO
    # register without disturbing the remaining fields.
    _tmp = anddata
    machine.mem32[addr] &= _tmp
    _tmp = data
    machine.mem32[addr] |= _tmp
#class MCU_DevF4Zx_Display():
class FSMC_Display_Zx():
    """FSMC setup for 'Zx' (144-pin) STM32F4 boards.

    Drives a 16-bit parallel LCD on FSMC bank 4 and external RAM on
    bank 3, using GPIO ports D, E, F and G for the FSMC signals.
    NOTE(review): the viper-compiled methods read ``self`` attributes;
    confirm the viper emitter on the target port supports this.
    """

    def __init__(self):
        # Bank-4 (display) BTR timing fields, in HCLK cycles.
        self.FSMC_BTR1_ADDSET_T = 0x00000001 # ILI9341, 90ns
        self.FSMC_BTR1_DATAST_T = 0x00000400 # HCLK cycles
        self.FSMC_BTR1_ADDHLD_T = 0x00000010
        # Bank-3 (external RAM) BTR timing fields, all zero.
        self.FSMC_BTR3_ADDSET_T = 0x00000000
        self.FSMC_BTR3_DATAST_T = 0x00000000
        self.FSMC_BTR3_ADDHLD_T = 0x00000000

    @micropython.viper
    def init_fsmc_disp(self):
        # Enable the FSMC and GPIO D/E/F/G peripheral clocks.
        _mem_OR(stm.RCC + stm.RCC_AHB3ENR,RCC_AHB3ENR_FSMCEN) # FSMC clock enable
        _mem_OR(stm.RCC + stm.RCC_AHB1ENR, RCC_AHB1ENR_GPIODEN | RCC_AHB1ENR_GPIOEEN | \
            RCC_AHB1ENR_GPIOFEN | RCC_AHB1ENR_GPIOGEN) # GPIO clock Enable
        # MODER: switch the FSMC pins to alternate-function mode via
        # AND(mask)/OR(value) so non-FSMC pins keep their current mode.
        # The commented whole-register writes show the equivalent value.
        _mem_AND_OR(stm.GPIOD + stm.GPIO_MODER,0b10101010101010101011101011111010,0b10101010101010101000101000001010)
        #machine.mem32[stm.GPIOD + stm.GPIO_MODER] = 0b10101010101010101000101000001010
        _mem_AND_OR(stm.GPIOE + stm.GPIO_MODER,0b10101010101010101011111111111010,0b10101010101010101000000000001010)
        #machine.mem32[stm.GPIOE + stm.GPIO_MODER] = 0b10101010101010101000000000001010
        _mem_AND_OR(stm.GPIOF + stm.GPIO_MODER,0b10101010111111111111101010101010,0b10101010000000000000101010101010)
        #machine.mem32[stm.GPIOF + stm.GPIO_MODER] = 0b10101010000000000000101010101010
        _mem_AND_OR(stm.GPIOG + stm.GPIO_MODER,0b11111110111011111111101010101010,0b00000010001000000000101010101010)
        #machine.mem32[stm.GPIOG + stm.GPIO_MODER] = 0b00000010001000000000101010101010
        # OSPEEDR: maximum output speed on all pins of these ports.
        machine.mem32[stm.GPIOD + stm.GPIO_OSPEEDR] = 0xFFFFFFFF # 0x54154525
        machine.mem32[stm.GPIOE + stm.GPIO_OSPEEDR] = 0xFFFFFFFF # 0x55554000
        machine.mem32[stm.GPIOF + stm.GPIO_OSPEEDR] = 0xFFFFFFFF #
        machine.mem32[stm.GPIOG + stm.GPIO_OSPEEDR] = 0xFFFFFFFF #
        # Alternate Function = FSMC = (0x0C) = 12
        _mem_AND_OR(stm.GPIOD + stm.GPIO_AFR0,0xFFCCFFCC,0x00CC00CC)
        #machine.mem32[stm.GPIOD + stm.GPIO_AFR0] = 0x00CC00CC
        machine.mem32[stm.GPIOD + stm.GPIO_AFR1] = 0xCCCCCCCC
        _mem_AND_OR(stm.GPIOE + stm.GPIO_AFR0,0xCFFFFFCC,0xC00000CC)
        #machine.mem32[stm.GPIOE + stm.GPIO_AFR0] = 0xC00000CC
        machine.mem32[stm.GPIOE + stm.GPIO_AFR1] = 0xCCCCCCCC
        _mem_AND_OR(stm.GPIOF + stm.GPIO_AFR0,0xFFCCCCCC,0x00CCCCCC)
        #machine.mem32[stm.GPIOF + stm.GPIO_AFR0] = 0x00CCCCCC
        _mem_AND_OR(stm.GPIOF + stm.GPIO_AFR1,0xCCCCFFFF,0xCCCC0000)
        #machine.mem32[stm.GPIOF + stm.GPIO_AFR1] = 0xCCCC0000
        _mem_AND_OR(stm.GPIOG + stm.GPIO_AFR0,0xFFCCCCCC,0x00CCCCCC)
        #machine.mem32[stm.GPIOG + stm.GPIO_AFR0] = 0x00CCCCCC
        _mem_AND_OR(stm.GPIOG + stm.GPIO_AFR1,0xFFFCFCFF,0x000C0C00)
        #machine.mem32[stm.GPIOG + stm.GPIO_AFR1] = 0x000C0C00
        # Program bank-4 timing (BTR4), then enable the bank (BCR4):
        # 16-bit width, writes enabled, bank enabled.
        machine.mem32[FSMC_Banks_base + FSMC_BTR_bank4_offset] = self.FSMC_BTR1_ADDSET_T | self.FSMC_BTR1_DATAST_T | self.FSMC_BTR1_ADDHLD_T # | FSMC_BTR1_CLKDIV_1 | FSMC_BTR1_ACCMOD
        _mem_SET(FSMC_Banks_base + FSMC_BCR_bank4_offset,FSMC_BCRx_MWID_16bit | FSMC_BCRx_WREN | FSMC_BCRx_MBKEN)

    @micropython.viper
    def init_fsmc_ram(self):
        # Same clock/pin configuration as init_fsmc_disp, but programs
        # FSMC bank 3 for the external RAM instead of bank 4.
        _mem_OR(stm.RCC + stm.RCC_AHB3ENR,RCC_AHB3ENR_FSMCEN)
        _mem_OR(stm.RCC + stm.RCC_AHB1ENR, RCC_AHB1ENR_GPIODEN | RCC_AHB1ENR_GPIOEEN | \
            RCC_AHB1ENR_GPIOFEN | RCC_AHB1ENR_GPIOGEN) # GPIO clock Enable
        # MODER: FSMC pins to alternate-function mode, other pins untouched.
        _mem_AND_OR(stm.GPIOD + stm.GPIO_MODER,0b10101010101010101011101011111010,0b10101010101010101000101000001010)
        #machine.mem32[stm.GPIOD + stm.GPIO_MODER] = 0b10101010101010101000101000001010
        _mem_AND_OR(stm.GPIOE + stm.GPIO_MODER,0b10101010101010101011111111111010,0b10101010101010101000000000001010)
        #machine.mem32[stm.GPIOE + stm.GPIO_MODER] = 0b10101010101010101000000000001010
        _mem_AND_OR(stm.GPIOF + stm.GPIO_MODER,0b10101010111111111111101010101010,0b10101010000000000000101010101010)
        #machine.mem32[stm.GPIOF + stm.GPIO_MODER] = 0b10101010000000000000101010101010
        _mem_AND_OR(stm.GPIOG + stm.GPIO_MODER,0b11111110111011111111101010101010,0b00000010001000000000101010101010)
        #machine.mem32[stm.GPIOG + stm.GPIO_MODER] = 0b00000010001000000000101010101010
        # OSPEEDR: maximum output speed on all pins of these ports.
        machine.mem32[stm.GPIOD + stm.GPIO_OSPEEDR] = 0xFFFFFFFF
        machine.mem32[stm.GPIOE + stm.GPIO_OSPEEDR] = 0xFFFFFFFF
        machine.mem32[stm.GPIOF + stm.GPIO_OSPEEDR] = 0xFFFFFFFF
        machine.mem32[stm.GPIOG + stm.GPIO_OSPEEDR] = 0xFFFFFFFF
        # AFR: alternate function 12 (0xC) = FSMC on the FSMC pins.
        _mem_AND_OR(stm.GPIOD + stm.GPIO_AFR0,0xFFCCFFCC,0x00CC00CC)
        #machine.mem32[stm.GPIOD + stm.GPIO_AFR0] = 0x00CC00CC
        machine.mem32[stm.GPIOD + stm.GPIO_AFR1] = 0xCCCCCCCC
        _mem_AND_OR(stm.GPIOE + stm.GPIO_AFR0,0xCFFFFFCC,0xC00000CC)
        #machine.mem32[stm.GPIOE + stm.GPIO_AFR0] = 0xC00000CC
        machine.mem32[stm.GPIOE + stm.GPIO_AFR1] = 0xCCCCCCCC
        _mem_AND_OR(stm.GPIOF + stm.GPIO_AFR0,0xFFCCCCCC,0x00CCCCCC)
        #machine.mem32[stm.GPIOF + stm.GPIO_AFR0] = 0x00CCCCCC
        _mem_AND_OR(stm.GPIOF + stm.GPIO_AFR1,0xCCCCFFFF,0xCCCC0000)
        #machine.mem32[stm.GPIOF + stm.GPIO_AFR1] = 0xCCCC0000
        _mem_AND_OR(stm.GPIOG + stm.GPIO_AFR0,0xFFCCCCCC,0x00CCCCCC)
        #machine.mem32[stm.GPIOG + stm.GPIO_AFR0] = 0x00CCCCCC
        _mem_AND_OR(stm.GPIOG + stm.GPIO_AFR1,0xFFFCFCFF,0x000C0C00)
        #machine.mem32[stm.GPIOG + stm.GPIO_AFR1] = 0x000C0C00
        # Program bank-3 timing (BTR3), then enable the bank (BCR3).
        machine.mem32[FSMC_Banks_base + FSMC_BTR_bank3_offset] = self.FSMC_BTR3_ADDSET_T | self.FSMC_BTR3_DATAST_T | self.FSMC_BTR3_ADDHLD_T
        _mem_SET(FSMC_Banks_base + FSMC_BCR_bank3_offset, FSMC_BCRx_MWID_16bit | FSMC_BCRx_WREN | FSMC_BCRx_MBKEN)
#class MCU_DevF4Zx_SRAM():
class FSMC_SRAM():
    """FSMC setup for external SRAM on bank 3 (Zx-style pinout, ports D-G).

    Unlike FSMC_Display_Zx, this variant writes the GPIO configuration
    registers wholesale instead of masking, so it clobbers the mode /
    alternate-function settings of every pin on ports D, E, F and G.
    NOTE(review): the viper method reads ``self`` attributes; confirm the
    viper emitter on the target port supports this.
    """

    def __init__(self):
        # Bank-3 BTR timing fields, all zero.
        self.FSMC_BTR3_ADDSET_T = 0x00000000
        self.FSMC_BTR3_DATAST_T = 0x00000000
        self.FSMC_BTR3_ADDHLD_T = 0x00000000

    @micropython.viper
    def init_fsmc_ram(self):
        # Enable the FSMC and GPIO D/E/F/G peripheral clocks.
        _mem_OR(stm.RCC + stm.RCC_AHB3ENR,RCC_AHB3ENR_FSMCEN)
        _mem_OR(stm.RCC + stm.RCC_AHB1ENR, RCC_AHB1ENR_GPIODEN | RCC_AHB1ENR_GPIOEEN | \
            RCC_AHB1ENR_GPIOFEN | RCC_AHB1ENR_GPIOGEN) # GPIO clock Enable
        # MODER: whole-register writes putting the FSMC pins into
        # alternate-function mode (0b10).
        machine.mem32[stm.GPIOD + stm.GPIO_MODER] = 0b10101010101010101000101000001010
        machine.mem32[stm.GPIOE + stm.GPIO_MODER] = 0b10101010101010101000000000001010
        machine.mem32[stm.GPIOF + stm.GPIO_MODER] = 0b10101010000000000000101010101010
        machine.mem32[stm.GPIOG + stm.GPIO_MODER] = 0b00000010001000000000101010101010
        # OSPEEDR: maximum output speed on all pins of these ports.
        machine.mem32[stm.GPIOD + stm.GPIO_OSPEEDR] = 0xFFFFFFFF
        machine.mem32[stm.GPIOE + stm.GPIO_OSPEEDR] = 0xFFFFFFFF
        machine.mem32[stm.GPIOF + stm.GPIO_OSPEEDR] = 0xFFFFFFFF
        machine.mem32[stm.GPIOG + stm.GPIO_OSPEEDR] = 0xFFFFFFFF
        # AFR: alternate function 12 (0xC) = FSMC on the FSMC pins.
        machine.mem32[stm.GPIOD + stm.GPIO_AFR0] = 0x00CC00CC
        machine.mem32[stm.GPIOD + stm.GPIO_AFR1] = 0xCCCCCCCC
        machine.mem32[stm.GPIOE + stm.GPIO_AFR0] = 0xC00000CC
        machine.mem32[stm.GPIOE + stm.GPIO_AFR1] = 0xCCCCCCCC
        machine.mem32[stm.GPIOF + stm.GPIO_AFR0] = 0x00CCCCCC
        machine.mem32[stm.GPIOF + stm.GPIO_AFR1] = 0xCCCC0000
        machine.mem32[stm.GPIOG + stm.GPIO_AFR0] = 0x00CCCCCC
        machine.mem32[stm.GPIOG + stm.GPIO_AFR1] = 0x000C0C00
        # Program bank-3 timing (BTR3), then enable the bank (BCR3):
        # 16-bit width, writes enabled, bank enabled.
        machine.mem32[FSMC_Banks_base + FSMC_BTR_bank3_offset] = self.FSMC_BTR3_ADDSET_T | self.FSMC_BTR3_DATAST_T | self.FSMC_BTR3_ADDHLD_T
        _mem_SET(FSMC_Banks_base + FSMC_BCR_bank3_offset, FSMC_BCRx_MWID_16bit | FSMC_BCRx_WREN | FSMC_BCRx_MBKEN)
#class MCU_DevF4Vx_Display():
class FSMC_Display_Vx():
    """FSMC setup for 'Vx' (100-pin) STM32F4 boards: LCD on bank 1.

    Only GPIO ports D and E carry the FSMC signals on these packages.
    NOTE(review): the viper-compiled method reads ``self`` attributes;
    confirm the viper emitter on the target port supports this.
    """

    def __init__(self):
        # Enable the FSMC and GPIO D/E clocks up front so register access
        # is valid even before init_fsmc_disp() runs.
        _mem_OR(stm.RCC + stm.RCC_AHB3ENR,RCC_AHB3ENR_FSMCEN)
        _mem_OR(stm.RCC + stm.RCC_AHB1ENR, RCC_AHB1ENR_GPIODEN | RCC_AHB1ENR_GPIOEEN)
        # Bank-1 BTR timing fields (HCLK cycles) and BCR control bits.
        self.FSMC_BTR1_ADDSET_T = 0x00000001
        self.FSMC_BTR1_DATAST_T = 0x00000400
        self.FSMC_BCR1_MWID_0 = 0x00000010
        self.FSMC_BCR1_WREN = 0x00001000
        self.FSMC_BCR1_MBKEN = 0x1
        self.FSMC_BTR1_ADDHLD_0 = 0x00000010

    @micropython.viper
    def init_fsmc_disp(self):
        # Enable the FSMC and GPIO D/E peripheral clocks (idempotent).
        _mem_OR(stm.RCC + stm.RCC_AHB3ENR,RCC_AHB3ENR_FSMCEN)
        _mem_OR(stm.RCC + stm.RCC_AHB1ENR, RCC_AHB1ENR_GPIODEN | RCC_AHB1ENR_GPIOEEN)
        # MODER: FSMC pins to alternate-function mode (0b10).
        # 10 10 10 00 00 10 10 10 10 00 10 10 00 10 10 10
        machine.mem32[stm.GPIOD + stm.GPIO_MODER] = 0b10101000001010101000101000101010
        machine.mem32[stm.GPIOE + stm.GPIO_MODER] = 0xAAAA8000
        # OSPEEDR: maximum output speed on all pins of D and E.
        machine.mem32[stm.GPIOD + stm.GPIO_OSPEEDR] = 0xFFFFFFFF
        machine.mem32[stm.GPIOE + stm.GPIO_OSPEEDR] = 0xFFFFFFFF
        # Alternate Function = FSMC = (0x0C) = 12 on the FSMC pins.
        machine.mem32[stm.GPIOD + stm.GPIO_AFR0] = ( (0b1100<<(4*0)) | (0b1100<<(4*1)) | (0b1100<<(4*2)) | (0b1100<<(4*4)) | (0b1100<<(4*5)) | (0b1100<<(4*7)) )
        machine.mem32[stm.GPIOD + stm.GPIO_AFR1] = ( (0b1100<<(4*(8-8))) | (0b1100<<(4*(9-8))) | (0b1100<<(4*(10-8))) | (0b1100<<(4*(13-8))) | (0b1100<<(4*(14-8))) | (0b1100<<(4*(15-8))))
        machine.mem32[stm.GPIOE + stm.GPIO_AFR0] = 0b1100<<(4*7)
        machine.mem32[stm.GPIOE + stm.GPIO_AFR1] = 0xCCCCCCCC
        # BUGFIX: the timing word was previously written to the BCR
        # (control) register offset and then immediately overwritten by the
        # control word below, leaving the BTR1 (timing) register at its
        # reset value.  Write the timing to the bank-1 BTR register, as the
        # Zx/SRAM variants do for their banks.
        machine.mem32[FSMC_Banks_base + FSMC_BTR_bank1_offset] = self.FSMC_BTR1_ADDSET_T | self.FSMC_BTR1_DATAST_T | self.FSMC_BTR1_ADDHLD_0 # | FSMC_BTR1_CLKDIV_1 | FSMC_BTR1_ACCMOD
        # Enable the bank: 16-bit width, writes enabled, bank enabled.
        _mem_SET(FSMC_Banks_base + FSMC_BCR_bank1_offset, FSMC_BCRx_MWID_16bit | FSMC_BCRx_WREN | FSMC_BCRx_MBKEN)
| 57.210811
| 188
| 0.716648
| 1,365
| 10,584
| 5.211722
| 0.104762
| 0.074782
| 0.118077
| 0.030925
| 0.844532
| 0.832584
| 0.810374
| 0.788445
| 0.738825
| 0.738825
| 0
| 0.209456
| 0.186697
| 10,584
| 184
| 189
| 57.521739
| 0.616984
| 0.155045
| 0
| 0.604167
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089153
| 0
| 0
| 1
| 0.069444
| false
| 0
| 0.013889
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a367104f4ebbefefacc0d9fb84442ada945d66f
| 144
|
py
|
Python
|
utils/__init__.py
|
ravi-0841/Emocon
|
acf713ddc26999f66416acb490045ea3c1db1ab7
|
[
"MIT"
] | 1
|
2019-11-14T09:56:17.000Z
|
2019-11-14T09:56:17.000Z
|
utils/__init__.py
|
ravi-0841/Emocon
|
acf713ddc26999f66416acb490045ea3c1db1ab7
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
ravi-0841/Emocon
|
acf713ddc26999f66416acb490045ea3c1db1ab7
|
[
"MIT"
] | null | null | null |
from get_features import *
from predict_params import *
from predict_pitch import *
from predict_energy import *
from generate_waveform import *
| 28.8
| 31
| 0.833333
| 20
| 144
| 5.75
| 0.5
| 0.347826
| 0.443478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131944
| 144
| 5
| 31
| 28.8
| 0.92
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8a7cf5797e83bf032fabdc0f334ffb9b7945cd59
| 7,224
|
py
|
Python
|
test/test_feature.py
|
hirano1412/bdpy
|
cee6f36dcdf4f4d29fc3a6980777e1c3d7c66cbb
|
[
"MIT"
] | 18
|
2018-01-22T04:18:48.000Z
|
2022-03-12T09:42:03.000Z
|
test/test_feature.py
|
hirano1412/bdpy
|
cee6f36dcdf4f4d29fc3a6980777e1c3d7c66cbb
|
[
"MIT"
] | 13
|
2018-05-01T08:31:14.000Z
|
2022-02-21T06:45:34.000Z
|
test/test_feature.py
|
hirano1412/bdpy
|
cee6f36dcdf4f4d29fc3a6980777e1c3d7c66cbb
|
[
"MIT"
] | 15
|
2019-03-04T02:43:46.000Z
|
2022-02-17T00:41:47.000Z
|
from unittest import TestCase, TestLoader, TextTestRunner
import numpy as np
from bdpy.feature import normalize_feature
class TestUtilFeature(TestCase):
    """Tests for bdpy.feature.normalize_feature."""

    def test_normalize_feature_1d(self):
        """normalize_feature on a 1-D feature vector.

        For a 1-D input the channel axis is the only axis, so the
        channel-wise and all-element statistics coincide.
        """
        feat = np.random.rand(4096)
        feat_mean0 = np.random.rand(1, 1)
        feat_std0 = np.random.rand(1, 1)
        ddof = 1

        feat_mean_ch = np.mean(feat, axis=None, keepdims=True)
        feat_mean_all = np.mean(feat, axis=None, keepdims=True)
        feat_std_ch = np.std(feat, axis=None, ddof=ddof, keepdims=True)
        feat_std_all = np.mean(np.std(feat, axis=None, ddof=ddof, keepdims=True), keepdims=True)

        # Mean (channel-wise) + SD (channel-wise)
        feat_valid = ((feat - feat_mean_ch) / feat_std_ch) * feat_std0 + feat_mean0
        feat_test = normalize_feature(feat,
                                      channel_axis=0, channel_wise_mean=True, channel_wise_std=True,
                                      shift=feat_mean0, scale=feat_std0,
                                      std_ddof=1)
        np.testing.assert_array_equal(feat_test, feat_valid)

        # Mean (channel-wise) + SD (all)
        feat_valid = ((feat - feat_mean_ch) / feat_std_all) * feat_std0 + feat_mean0
        feat_test = normalize_feature(feat,
                                      channel_axis=0, channel_wise_mean=True, channel_wise_std=False,
                                      shift=feat_mean0, scale=feat_std0,
                                      std_ddof=1)
        np.testing.assert_array_equal(feat_test, feat_valid)

        # Mean (all) + SD (channel-wise)
        feat_valid = ((feat - feat_mean_all) / feat_std_ch) * feat_std0 + feat_mean0
        feat_test = normalize_feature(feat,
                                      channel_axis=0, channel_wise_mean=False, channel_wise_std=True,
                                      shift=feat_mean0, scale=feat_std0,
                                      std_ddof=1)
        np.testing.assert_array_equal(feat_test, feat_valid)

        # Mean (all) + SD (all)
        feat_valid = ((feat - feat_mean_all) / feat_std_all) * feat_std0 + feat_mean0
        feat_test = normalize_feature(feat,
                                      channel_axis=0, channel_wise_mean=False, channel_wise_std=False,
                                      shift=feat_mean0, scale=feat_std0,
                                      std_ddof=1)
        np.testing.assert_array_equal(feat_test, feat_valid)

        # Mean (channel-wise) + SD (channel-wise), self-mean shift
        feat_valid = ((feat - feat_mean_ch) / feat_std_ch) * feat_std0 + feat_mean_ch
        feat_test = normalize_feature(feat,
                                      channel_axis=0, channel_wise_mean=True, channel_wise_std=True,
                                      shift='self', scale=feat_std0,
                                      std_ddof=1)
        np.testing.assert_array_equal(feat_test, feat_valid)

        # Mean (channel-wise) + SD (channel-wise), self-mean shift and self-SD scale
        feat_valid = ((feat - feat_mean_ch) / feat_std_ch) * feat_std_ch + feat_mean_ch
        feat_test = normalize_feature(feat,
                                      channel_axis=0, channel_wise_mean=True, channel_wise_std=True,
                                      shift='self', scale='self',
                                      std_ddof=1)
        np.testing.assert_array_equal(feat_test, feat_valid)

    def test_normalize_feature_3d(self):
        """normalize_feature on a 3-D (channel, height, width) feature array.

        Channel-wise statistics are taken over axes (1, 2); all-element
        statistics over the whole array.
        """
        feat = np.random.rand(64, 16, 16)
        feat_mean0 = np.random.rand(64, 1, 1)
        feat_std0 = np.random.rand(64, 1, 1)
        ddof = 1

        feat_mean_ch = np.mean(feat, axis=(1, 2), keepdims=True)
        feat_mean_all = np.mean(feat, axis=None, keepdims=True)
        feat_std_ch = np.std(feat, axis=(1, 2), ddof=ddof, keepdims=True)
        feat_std_all = np.mean(np.std(feat, axis=(1, 2), ddof=ddof, keepdims=True), keepdims=True)

        # Mean (channel-wise) + SD (channel-wise)
        feat_valid = ((feat - feat_mean_ch) / feat_std_ch) * feat_std0 + feat_mean0
        feat_test = normalize_feature(feat,
                                      channel_axis=0, channel_wise_mean=True, channel_wise_std=True,
                                      shift=feat_mean0, scale=feat_std0,
                                      std_ddof=1)
        np.testing.assert_array_equal(feat_test, feat_valid)

        # Mean (channel-wise) + SD (all)
        feat_valid = ((feat - feat_mean_ch) / feat_std_all) * feat_std0 + feat_mean0
        feat_test = normalize_feature(feat,
                                      channel_axis=0, channel_wise_mean=True, channel_wise_std=False,
                                      shift=feat_mean0, scale=feat_std0,
                                      std_ddof=1)
        np.testing.assert_array_equal(feat_test, feat_valid)

        # Mean (all) + SD (channel-wise)
        feat_valid = ((feat - feat_mean_all) / feat_std_ch) * feat_std0 + feat_mean0
        feat_test = normalize_feature(feat,
                                      channel_axis=0, channel_wise_mean=False, channel_wise_std=True,
                                      shift=feat_mean0, scale=feat_std0,
                                      std_ddof=1)
        np.testing.assert_array_equal(feat_test, feat_valid)

        # Mean (all) + SD (all)
        feat_valid = ((feat - feat_mean_all) / feat_std_all) * feat_std0 + feat_mean0
        feat_test = normalize_feature(feat,
                                      channel_axis=0, channel_wise_mean=False, channel_wise_std=False,
                                      shift=feat_mean0, scale=feat_std0,
                                      std_ddof=1)
        np.testing.assert_array_equal(feat_test, feat_valid)

        # Mean (channel-wise) + SD (channel-wise), self-mean shift
        feat_valid = ((feat - feat_mean_ch) / feat_std_ch) * feat_std0 + feat_mean_ch
        feat_test = normalize_feature(feat,
                                      channel_axis=0, channel_wise_mean=True, channel_wise_std=True,
                                      shift='self', scale=feat_std0,
                                      std_ddof=1)
        np.testing.assert_array_equal(feat_test, feat_valid)

        # Mean (channel-wise) + SD (channel-wise), self-mean shift and self-SD scale
        feat_valid = ((feat - feat_mean_ch) / feat_std_ch) * feat_std_ch + feat_mean_ch
        feat_test = normalize_feature(feat,
                                      channel_axis=0, channel_wise_mean=True, channel_wise_std=True,
                                      shift='self', scale='self',
                                      std_ddof=1)
        # BUGFIX: this assertion was missing, so the self-shift/self-scale
        # case was computed but never actually checked (compare with the
        # matching case in test_normalize_feature_1d).
        np.testing.assert_array_equal(feat_test, feat_valid)

        # SD scaling only
        feat_valid = (feat / feat_std_all) * feat_std0
        feat_test = normalize_feature(feat,
                                      scaling_only=True,
                                      channel_wise_std=False,
                                      scale=feat_std0, std_ddof=1)
        np.testing.assert_array_equal(feat_test, feat_valid)
if __name__ == '__main__':
    # Run this module's tests with verbose output when executed directly.
    runner = TextTestRunner(verbosity=2)
    runner.run(TestLoader().loadTestsFromTestCase(TestUtilFeature))
| 46.606452
| 102
| 0.557032
| 869
| 7,224
| 4.286536
| 0.071346
| 0.121074
| 0.080537
| 0.059329
| 0.910604
| 0.879195
| 0.86255
| 0.850738
| 0.849664
| 0.842148
| 0
| 0.021874
| 0.354513
| 7,224
| 154
| 103
| 46.909091
| 0.776968
| 0.072951
| 0
| 0.747573
| 0
| 0
| 0.004788
| 0
| 0
| 0
| 0
| 0
| 0.116505
| 1
| 0.019417
| false
| 0
| 0.029126
| 0
| 0.058252
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8aa1068bbb1315de8443a574e7e9c846e96c5cee
| 7,248
|
py
|
Python
|
tests/garage/torch/policies/test_gaussian_mlp_policy.py
|
st2yang/garage
|
50186a9630df038aeba36d6b06b006ab32ed48f5
|
[
"MIT"
] | null | null | null |
tests/garage/torch/policies/test_gaussian_mlp_policy.py
|
st2yang/garage
|
50186a9630df038aeba36d6b06b006ab32ed48f5
|
[
"MIT"
] | null | null | null |
tests/garage/torch/policies/test_gaussian_mlp_policy.py
|
st2yang/garage
|
50186a9630df038aeba36d6b06b006ab32ed48f5
|
[
"MIT"
] | null | null | null |
"""Test Gaussian MLP Policy."""
import pickle
import numpy as np
import pytest
import torch
from torch import nn
from garage.envs import GarageEnv
from garage.torch.policies import GaussianMLPPolicy
from tests.fixtures.envs.dummy import DummyBoxEnv
class TestGaussianMLPPolicies:
    """Class for Testing Gaussian MLP Policy."""

    @staticmethod
    def _make_policy(env_spec, hidden_sizes, init_std):
        """Build a GaussianMLPPolicy with deterministic all-ones weights.

        With no hidden nonlinearity and ones-initialized weights, an
        all-ones observation yields a mean of exactly
        obs_dim * prod(hidden_sizes), which the tests assert against.
        This helper replaces five identical inline constructions.
        """
        return GaussianMLPPolicy(env_spec=env_spec,
                                 hidden_sizes=hidden_sizes,
                                 init_std=init_std,
                                 hidden_nonlinearity=None,
                                 std_parameterization='exp',
                                 hidden_w_init=nn.init.ones_,
                                 output_w_init=nn.init.ones_)

    # yapf: disable
    @pytest.mark.parametrize('hidden_sizes', [
        (1, ), (2, ), (3, ), (1, 4), (3, 5)])
    # yapf: enable
    def test_get_action(self, hidden_sizes):
        """Test get_action function."""
        env_spec = GarageEnv(DummyBoxEnv())
        obs_dim = env_spec.observation_space.flat_dim
        act_dim = env_spec.action_space.flat_dim
        obs = torch.ones(obs_dim, dtype=torch.float32)
        init_std = 2.
        policy = self._make_policy(env_spec, hidden_sizes, init_std)
        dist = policy(obs)[0]
        expected_mean = torch.full(
            (act_dim, ), obs_dim * (torch.Tensor(hidden_sizes).prod().item()))
        expected_variance = init_std**2
        action, prob = policy.get_action(obs)
        assert np.array_equal(prob['mean'], expected_mean.numpy())
        assert dist.variance.equal(torch.full((act_dim, ), expected_variance))
        assert action.shape == (act_dim, )

    # yapf: disable
    @pytest.mark.parametrize('hidden_sizes', [
        (1, ), (2, ), (3, ), (1, 4), (3, 5)])
    # yapf: enable
    def test_get_action_np(self, hidden_sizes):
        """Test get_action function with numpy inputs."""
        env_spec = GarageEnv(DummyBoxEnv())
        obs_dim = env_spec.observation_space.flat_dim
        act_dim = env_spec.action_space.flat_dim
        obs = np.ones(obs_dim, dtype=np.float32)
        init_std = 2.
        policy = self._make_policy(env_spec, hidden_sizes, init_std)
        dist = policy(torch.from_numpy(obs))[0]
        expected_mean = torch.full(
            (act_dim, ), obs_dim * (torch.Tensor(hidden_sizes).prod().item()))
        expected_variance = init_std**2
        action, prob = policy.get_action(obs)
        assert np.array_equal(prob['mean'], expected_mean.numpy())
        assert dist.variance.equal(torch.full((act_dim, ), expected_variance))
        assert action.shape == (act_dim, )

    # yapf: disable
    @pytest.mark.parametrize('batch_size, hidden_sizes', [
        (1, (1, )),
        (5, (3, )),
        (8, (4, )),
        (15, (1, 2)),
        (30, (3, 4, 10)),
    ])
    # yapf: enable
    def test_get_actions(self, batch_size, hidden_sizes):
        """Test get_actions function."""
        env_spec = GarageEnv(DummyBoxEnv())
        obs_dim = env_spec.observation_space.flat_dim
        act_dim = env_spec.action_space.flat_dim
        obs = torch.ones([batch_size, obs_dim], dtype=torch.float32)
        init_std = 2.
        policy = self._make_policy(env_spec, hidden_sizes, init_std)
        dist = policy(obs)[0]
        expected_mean = torch.full([batch_size, act_dim],
                                   obs_dim *
                                   (torch.Tensor(hidden_sizes).prod().item()))
        expected_variance = init_std**2
        action, prob = policy.get_actions(obs)
        assert np.array_equal(prob['mean'], expected_mean.numpy())
        assert dist.variance.equal(
            torch.full((batch_size, act_dim), expected_variance))
        assert action.shape == (batch_size, act_dim)

    # yapf: disable
    @pytest.mark.parametrize('batch_size, hidden_sizes', [
        (1, (1, )),
        (5, (3, )),
        (8, (4, )),
        (15, (1, 2)),
        (30, (3, 4, 10)),
    ])
    # yapf: enable
    def test_get_actions_np(self, batch_size, hidden_sizes):
        """Test get_actions function with numpy inputs."""
        env_spec = GarageEnv(DummyBoxEnv())
        obs_dim = env_spec.observation_space.flat_dim
        act_dim = env_spec.action_space.flat_dim
        obs = np.ones((batch_size, obs_dim), dtype=np.float32)
        init_std = 2.
        policy = self._make_policy(env_spec, hidden_sizes, init_std)
        dist = policy(torch.from_numpy(obs))[0]
        expected_mean = torch.full([batch_size, act_dim],
                                   obs_dim *
                                   (torch.Tensor(hidden_sizes).prod().item()))
        expected_variance = init_std**2
        action, prob = policy.get_actions(obs)
        assert np.array_equal(prob['mean'], expected_mean.numpy())
        assert dist.variance.equal(
            torch.full((batch_size, act_dim), expected_variance))
        assert action.shape == (batch_size, act_dim)

    # yapf: disable
    @pytest.mark.parametrize('batch_size, hidden_sizes', [
        (1, (1, )),
        (6, (3, )),
        (11, (6, )),
        (25, (3, 5)),
        (34, (2, 10, 11)),
    ])
    # yapf: enable
    def test_is_pickleable(self, batch_size, hidden_sizes):
        """Test if policy is pickleable."""
        env_spec = GarageEnv(DummyBoxEnv())
        obs_dim = env_spec.observation_space.flat_dim
        obs = torch.ones([batch_size, obs_dim], dtype=torch.float32)
        init_std = 2.
        policy = self._make_policy(env_spec, hidden_sizes, init_std)
        output1_action, output1_prob = policy.get_actions(obs)
        # Round-trip through pickle and check the deterministic mean and
        # action shape survive serialization.
        p = pickle.dumps(policy)
        policy_pickled = pickle.loads(p)
        output2_action, output2_prob = policy_pickled.get_actions(obs)
        assert np.array_equal(output1_prob['mean'], output2_prob['mean'])
        assert output1_action.shape == output2_action.shape
| 38.759358
| 78
| 0.544702
| 804
| 7,248
| 4.630597
| 0.121891
| 0.070911
| 0.018802
| 0.029546
| 0.86382
| 0.857642
| 0.847703
| 0.824335
| 0.824335
| 0.803922
| 0
| 0.020017
| 0.345199
| 7,248
| 186
| 79
| 38.967742
| 0.764433
| 0.051325
| 0
| 0.807143
| 0
| 0
| 0.019763
| 0
| 0
| 0
| 0
| 0
| 0.1
| 1
| 0.035714
| false
| 0
| 0.057143
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
76efdbe4e5a700ae903bf5f000c6c4581203a1c8
| 87,788
|
py
|
Python
|
TWLight/resources/migrations/0001_initial_squashed_0062_auto_20190220_1639.py
|
nicole331/TWLight
|
fab9002e76868f8a2ef36f9279c777de34243b2c
|
[
"MIT"
] | 67
|
2017-12-14T22:27:48.000Z
|
2022-03-13T18:21:31.000Z
|
TWLight/resources/migrations/0001_initial_squashed_0062_auto_20190220_1639.py
|
nicole331/TWLight
|
fab9002e76868f8a2ef36f9279c777de34243b2c
|
[
"MIT"
] | 433
|
2017-03-24T22:51:23.000Z
|
2022-03-31T19:36:22.000Z
|
TWLight/resources/migrations/0001_initial_squashed_0062_auto_20190220_1639.py
|
Mahuton/TWLight
|
90b299d07b0479f21dc90e17b8d05f5a221b0de1
|
[
"MIT"
] | 105
|
2017-06-23T03:53:41.000Z
|
2022-03-30T17:24:29.000Z
|
# Generated by Django 3.0.11 on 2021-01-31 06:22
import TWLight.resources.models
import datetime
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.manager
from django.utils.timezone import utc
import django_countries.fields
# Functions from the following migrations need manual copying.
# Move them and any dependencies into this file, then update the
# RunPython operations to refer to the local versions:
# TWLight.resources.migrations.0013_auto_20161207_1505
# TWLight.resources.migrations.0015_auto_20161208_1526
# TWLight.resources.migrations.0020_move_to_internal_durationfield
# Functions from the following migrations need to be skipped
# TWLight.resources.migrations.0045_migrate_tags
def fix_partner_status(apps, schema_editor):
    """Reset every Partner's status to 0 (i.e. AVAILABLE)."""
    partner_model = apps.get_model("resources", "Partner")
    # 0 corresponds to Partner.AVAILABLE.  The model constant can't be
    # referenced directly because historical migration models don't carry
    # class attributes -- so the value is hard-coded here.  Careful!
    for record in partner_model.objects.all():
        record.status = 0
        record.save()
def initialize_languages(apps, schema_editor):
    """Seed the database with a few languages we know Partners offer.

    This also makes it easier for administrators to use the language
    field in the admin site.
    """
    language_model = apps.get_model("resources", "Language")
    for language_code in ("en", "fr", "fa"):
        language_model(language=language_code).save()
def copy_access_grant_terms(apps, schema_editor):
    """Copy each Partner's access_grant_term into access_grant_term_pythonic."""
    partner_model = apps.get_model("resources", "Partner")
    # Although this looks like it should only get AVAILABLE Partners (since
    # we've defined a custom manager), in fact it uses the Django default
    # internal manager and finds all Partners.
    for record in partner_model.objects.all():
        record.access_grant_term_pythonic = record.access_grant_term
        record.save()
def delete_access_grant_terms(apps, schema_editor):
    """Null out access_grant_term_pythonic on every Partner (reverse of copy)."""
    partner_model = apps.get_model("resources", "Partner")
    for record in partner_model.objects.all():
        record.access_grant_term_pythonic = None
        record.save()
class Migration(migrations.Migration):
dependencies = [
("contenttypes", "0002_remove_content_type_name"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="Partner",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"company_name",
models.CharField(
help_text="Partner organization's name (e.g. McFarland). Note: this will be user-visible and *not translated*.",
max_length=40,
),
),
(
"terms_of_use",
models.URLField(
blank=True,
help_text="Link to terms of use. Required if this company requires that users agree to terms of use as a condition of applying for access; optional otherwise.",
null=True,
),
),
(
"description",
models.TextField(
blank=True,
help_text=b"Optional description of this partner's offerings.",
null=True,
),
),
(
"mutually_exclusive",
models.BooleanField(
default=None,
null=True,
help_text=b"If True, users can only apply for one Stream at a time from this Partner. If False, users can apply for multiple Streams at a time. This field must be filled in when Partners have multiple Streams, but may be left blank otherwise.",
),
),
(
"real_name",
models.BooleanField(
default=False,
help_text="Mark as true if this partner requires applicants to specify their real names.",
),
),
(
"country_of_residence",
models.BooleanField(
default=False,
help_text="Mark as true if this partner requires applicants to specify their countries of residence.",
),
),
(
"specific_title",
models.BooleanField(
default=False,
help_text="Mark as true if this partner requires applicants to specify a particular title they want to access.",
),
),
(
"specific_stream",
models.BooleanField(
default=False,
help_text="Mark as true if this partner requires applicants to specify a particular database they want to access.",
),
),
(
"occupation",
models.BooleanField(
default=False,
help_text="Mark as true if this partner requires applicants to specify their occupation.",
),
),
(
"affiliation",
models.BooleanField(
default=False,
help_text="Mark as true if this partner requires applicants to specify their institutional affiliation.",
),
),
(
"agreement_with_terms_of_use",
models.BooleanField(
default=False,
help_text="Mark as true if this partner requires applicants to agree with the partner's terms of use.",
),
),
(
"access_grant_term",
models.DurationField(
blank=True,
help_text=b"The standard length of an access grant from this Partner. Enter like '365 days' or '365d' or '1 year'.",
null=True,
),
),
(
"date_created",
models.DateField(
auto_now_add=True,
default=datetime.datetime(
2016, 5, 9, 19, 18, 3, 475335, tzinfo=utc
),
),
),
(
"logo_url",
models.URLField(
blank=True,
help_text="Optional URL of an image that can be used to represent this partner.",
null=True,
),
),
(
"status",
models.IntegerField(
choices=[(0, "Available"), (1, "Not available")], default=1
),
),
],
options={
"verbose_name": "partner",
"verbose_name_plural": "partners",
"ordering": ["company_name"],
},
),
migrations.CreateModel(
name="Stream",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"name",
models.CharField(
help_text=b"Name of stream (e.g. 'Health and Behavioral Sciences). Will be user-visible and *not translated*. Do not include the name of the partner here. If partner name and resource name need to be presented together, templates are responsible for presenting them in a format that can be internationalized.",
max_length=50,
),
),
(
"description",
models.TextField(
blank=True,
help_text=b"Optional description of this stream's contents.",
null=True,
),
),
(
"partner",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="streams",
to="resources.Partner",
),
),
],
options={
"verbose_name": "collection",
"verbose_name_plural": "collections",
"ordering": ["partner", "name"],
},
),
migrations.CreateModel(
name="Contact",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"title",
models.CharField(
help_text="Organizational role or job title. This is NOT intended to be used for honorifics.",
max_length=30,
),
),
("email", models.EmailField(max_length=75)),
("full_name", models.CharField(max_length=50)),
(
"short_name",
models.CharField(
help_text=b"The form of the contact person's name to use in email greetings (as in 'Hi Jake')",
max_length=15,
),
),
(
"partner",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="contacts",
to="resources.Partner",
),
),
],
options={
"verbose_name": "contact person",
"verbose_name_plural": "contact people",
},
),
migrations.RunPython(
code=fix_partner_status,
),
migrations.CreateModel(
name="Language",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"language",
models.CharField(
choices=[
(b"af", b"Afrikaans"),
(b"ar", b"Arabic"),
(b"ast", b"Asturian"),
(b"az", b"Azerbaijani"),
(b"bg", b"Bulgarian"),
(b"be", b"Belarusian"),
(b"bn", b"Bengali"),
(b"br", b"Breton"),
(b"bs", b"Bosnian"),
(b"ca", b"Catalan"),
(b"cs", b"Czech"),
(b"cy", b"Welsh"),
(b"da", b"Danish"),
(b"de", b"German"),
(b"el", b"Greek"),
(b"en", b"English"),
(b"en-au", b"Australian English"),
(b"en-gb", b"British English"),
(b"eo", b"Esperanto"),
(b"es", b"Spanish"),
(b"es-ar", b"Argentinian Spanish"),
(b"es-mx", b"Mexican Spanish"),
(b"es-ni", b"Nicaraguan Spanish"),
(b"es-ve", b"Venezuelan Spanish"),
(b"et", b"Estonian"),
(b"eu", b"Basque"),
(b"fa", b"Persian"),
(b"fi", b"Finnish"),
(b"fr", b"French"),
(b"fy", b"Frisian"),
(b"ga", b"Irish"),
(b"gl", b"Galician"),
(b"he", b"Hebrew"),
(b"hi", b"Hindi"),
(b"hr", b"Croatian"),
(b"hu", b"Hungarian"),
(b"ia", b"Interlingua"),
(b"id", b"Indonesian"),
(b"io", b"Ido"),
(b"is", b"Icelandic"),
(b"it", b"Italian"),
(b"ja", b"Japanese"),
(b"ka", b"Georgian"),
(b"kk", b"Kazakh"),
(b"km", b"Khmer"),
(b"kn", b"Kannada"),
(b"ko", b"Korean"),
(b"lb", b"Luxembourgish"),
(b"lt", b"Lithuanian"),
(b"lv", b"Latvian"),
(b"mk", b"Macedonian"),
(b"ml", b"Malayalam"),
(b"mn", b"Mongolian"),
(b"mr", b"Marathi"),
(b"my", b"Burmese"),
(b"nb", b"Norwegian Bokmal"),
(b"ne", b"Nepali"),
(b"nl", b"Dutch"),
(b"nn", b"Norwegian Nynorsk"),
(b"os", b"Ossetic"),
(b"pa", b"Punjabi"),
(b"pl", b"Polish"),
(b"pt", b"Portuguese"),
(b"pt-br", b"Brazilian Portuguese"),
(b"ro", b"Romanian"),
(b"ru", b"Russian"),
(b"sk", b"Slovak"),
(b"sl", b"Slovenian"),
(b"sq", b"Albanian"),
(b"sr", b"Serbian"),
(b"sr-latn", b"Serbian Latin"),
(b"sv", b"Swedish"),
(b"sw", b"Swahili"),
(b"ta", b"Tamil"),
(b"te", b"Telugu"),
(b"th", b"Thai"),
(b"tr", b"Turkish"),
(b"tt", b"Tatar"),
(b"udm", b"Udmurt"),
(b"uk", b"Ukrainian"),
(b"ur", b"Urdu"),
(b"vi", b"Vietnamese"),
(b"zh-cn", b"Simplified Chinese"),
(b"zh-hans", b"Simplified Chinese"),
(b"zh-hant", b"Traditional Chinese"),
(b"zh-tw", b"Traditional Chinese"),
],
# NOTE(review): under Python 3 this bytes key b"invalid_choice" can
# never equal the str key "invalid_choice" that Django looks up when
# building validation errors, so this custom message is silently
# unused. Fix it in the model (and a fresh migration), not by editing
# this historical migration.
error_messages={
    b"invalid_choice": "You must enter an ISO language code, as in the LANGUAGES setting at https://github.com/django/django/blob/master/django/conf/global_settings.py"
},
max_length=8,
),
),
],
options={
"verbose_name": "Language",
"verbose_name_plural": "Languages",
},
),
migrations.AddField(
    model_name="partner",
    name="languages",
    # NOTE(review): null=True has no effect on a ManyToManyField (Django
    # ignores it and flags fields.W340); a later AlterField in this file
    # drops it. Kept byte-for-byte as recorded migration state.
    field=models.ManyToManyField(
        blank=True, null=True, to="resources.Language"
    ),
),
migrations.AlterField(
model_name="partner",
name="status",
field=models.IntegerField(
choices=[(0, "Available"), (1, "Not available")],
default=1,
help_text="Should this Partner be displayed to end users? Is it open for applications right now?",
),
),
# Data migration: populates the Language table (see initialize_languages
# defined earlier in this module).
# NOTE(review): no reverse_code — irreversible as written; reversing past
# this operation raises IrreversibleError.
migrations.RunPython(
    code=initialize_languages,
),
migrations.AddField(
    model_name="stream",
    name="languages",
    # NOTE(review): null=True is meaningless on a ManyToManyField
    # (fields.W340); a later AlterField in this file removes it. Preserved
    # unchanged as recorded migration state.
    field=models.ManyToManyField(
        blank=True, null=True, to="resources.Language"
    ),
),
migrations.AlterField(
model_name="language",
name="language",
field=models.CharField(
choices=[
(b"af", b"Afrikaans"),
(b"ar", b"Arabic"),
(b"ast", b"Asturian"),
(b"az", b"Azerbaijani"),
(b"bg", b"Bulgarian"),
(b"be", b"Belarusian"),
(b"bn", b"Bengali"),
(b"br", b"Breton"),
(b"bs", b"Bosnian"),
(b"ca", b"Catalan"),
(b"cs", b"Czech"),
(b"cy", b"Welsh"),
(b"da", b"Danish"),
(b"de", b"German"),
(b"el", b"Greek"),
(b"en", b"English"),
(b"en-au", b"Australian English"),
(b"en-gb", b"British English"),
(b"eo", b"Esperanto"),
(b"es", b"Spanish"),
(b"es-ar", b"Argentinian Spanish"),
(b"es-mx", b"Mexican Spanish"),
(b"es-ni", b"Nicaraguan Spanish"),
(b"es-ve", b"Venezuelan Spanish"),
(b"et", b"Estonian"),
(b"eu", b"Basque"),
(b"fa", b"Persian"),
(b"fi", b"Finnish"),
(b"fr", b"French"),
(b"fy", b"Frisian"),
(b"ga", b"Irish"),
(b"gl", b"Galician"),
(b"he", b"Hebrew"),
(b"hi", b"Hindi"),
(b"hr", b"Croatian"),
(b"hu", b"Hungarian"),
(b"ia", b"Interlingua"),
(b"id", b"Indonesian"),
(b"io", b"Ido"),
(b"is", b"Icelandic"),
(b"it", b"Italian"),
(b"ja", b"Japanese"),
(b"ka", b"Georgian"),
(b"kk", b"Kazakh"),
(b"km", b"Khmer"),
(b"kn", b"Kannada"),
(b"ko", b"Korean"),
(b"lb", b"Luxembourgish"),
(b"lt", b"Lithuanian"),
(b"lv", b"Latvian"),
(b"mk", b"Macedonian"),
(b"ml", b"Malayalam"),
(b"mn", b"Mongolian"),
(b"mr", b"Marathi"),
(b"my", b"Burmese"),
(b"nb", b"Norwegian Bokmal"),
(b"ne", b"Nepali"),
(b"nl", b"Dutch"),
(b"nn", b"Norwegian Nynorsk"),
(b"os", b"Ossetic"),
(b"pa", b"Punjabi"),
(b"pl", b"Polish"),
(b"pt", b"Portuguese"),
(b"pt-br", b"Brazilian Portuguese"),
(b"ro", b"Romanian"),
(b"ru", b"Russian"),
(b"sk", b"Slovak"),
(b"sl", b"Slovenian"),
(b"sq", b"Albanian"),
(b"sr", b"Serbian"),
(b"sr-latn", b"Serbian Latin"),
(b"sv", b"Swedish"),
(b"sw", b"Swahili"),
(b"ta", b"Tamil"),
(b"te", b"Telugu"),
(b"th", b"Thai"),
(b"tr", b"Turkish"),
(b"tt", b"Tatar"),
(b"udm", b"Udmurt"),
(b"uk", b"Ukrainian"),
(b"ur", b"Urdu"),
(b"vi", b"Vietnamese"),
(b"zh-cn", b"Simplified Chinese"),
(b"zh-hans", b"Simplified Chinese"),
(b"zh-hant", b"Traditional Chinese"),
(b"zh-tw", b"Traditional Chinese"),
],
max_length=8,
unique=True,
validators=[TWLight.resources.models.validate_language_code],
),
),
migrations.AlterField(
model_name="contact",
name="title",
field=models.CharField(
help_text="Organizational role or job title. This is NOT intended to be used for honorifics. Think 'Director of Editorial Services', not 'Ms.'",
max_length=75,
),
),
migrations.AddField(
model_name="partner",
name="access_grant_term_pythonic",
field=models.DurationField(
blank=True, default=datetime.timedelta(days=365), null=True
),
),
migrations.AlterField(
model_name="contact",
name="email",
field=models.EmailField(max_length=254),
),
# Squash artifact — multi-step conversion of access_grant_term to a
# DurationField, after which the field was dropped entirely:
#   1. copy old values into access_grant_term_pythonic (reversible via
#      delete_access_grant_terms),
#   2. remove the original access_grant_term column,
#   3. rename the pythonic column into its place,
#   4. a later migration in the squashed history removes the field again.
migrations.RunPython(
    code=copy_access_grant_terms,
    reverse_code=delete_access_grant_terms,
),
migrations.RemoveField(
    model_name="partner",
    name="access_grant_term",
),
migrations.RenameField(
    model_name="partner",
    old_name="access_grant_term_pythonic",
    new_name="access_grant_term",
),
# NOTE(review): this second RemoveField deletes the field that was just
# renamed into place. That is intentional — this file squashes a longer
# history — so do not "fix" it by deleting either RemoveField.
migrations.RemoveField(
    model_name="partner",
    name="access_grant_term",
),
migrations.AlterField(
model_name="partner",
name="description",
field=models.TextField(
blank=True,
help_text="Optional description of this partner's offerings. You can enter HTML and it should render properly - if it does not, the developer forgot a | safe filter in the template.",
null=True,
),
),
migrations.AlterField(
model_name="stream",
name="description",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's contents. You can enter HTML and it should render properly - if it does not, the developer forgot a | safe filter in the template.",
null=True,
),
),
migrations.AlterField(
model_name="partner",
name="description",
field=models.TextField(
blank=True,
help_text="Optional description of this partner's offerings. You can enter HTML and it should render properly - if it does not, the developer forgot a | safe filter in the template. Whatever you enter here will also be automatically copied over to the description field for *your current language*, so you do not need to also fill that out.",
null=True,
),
),
migrations.AlterField(
model_name="partner",
name="languages",
field=models.ManyToManyField(
blank=True,
help_text="Select all languages in which this partner publishes content.",
null=True,
to="resources.Language",
),
),
migrations.AddField(
model_name="partner",
name="coordinator",
field=models.ForeignKey(
blank=True,
help_text="The coordinator for this Partner, if any.",
null=True,
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="partner",
name="languages",
field=models.ManyToManyField(
blank=True,
help_text="Select all languages in which this partner publishes content.",
to="resources.Language",
),
),
migrations.AlterField(
model_name="stream",
name="languages",
field=models.ManyToManyField(blank=True, to="resources.Language"),
),
migrations.AlterField(
model_name="partner",
name="status",
field=models.IntegerField(
choices=[(0, "Available"), (1, "Not available"), (2, "Waitlisted")],
default=1,
help_text="Should this Partner be displayed to end users? Is it open for applications right now?",
),
),
migrations.AddField(
model_name="partner",
name="tags",
field=models.TextField(
blank=True,
null=True,
default=None,
help_text="A comma-separated list of tags.",
verbose_name="Tags",
),
),
migrations.AddField(
model_name="partner",
name="renewals_available",
field=models.BooleanField(
default=False,
help_text="Can access grants to this partner be renewed? If so, users will be able to request renewals at any time.",
),
),
migrations.AlterField(
model_name="contact",
name="title",
field=models.CharField(
blank=True,
help_text="Organizational role or job title. This is NOT intended to be used for honorifics. Think 'Director of Editorial Services', not 'Ms.' Optional.",
max_length=75,
),
),
migrations.AlterField(
model_name="partner",
name="description",
field=models.TextField(
blank=True,
help_text="Optional description of this partner's offerings. You can enter wikicode and it should render properly - if it does not, the developer forgot a | safe filter in the template. Whatever you enter here will also be automatically copied over to the description field for *your current language*, so you do not need to also fill that out.",
null=True,
),
),
migrations.AlterField(
model_name="stream",
name="description",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's contents. You can enter wikicode and it should render properly - if it does not, the developer forgot a | safe filter in the template.",
null=True,
),
),
migrations.AlterField(
model_name="partner",
name="company_name",
field=models.CharField(
help_text="Partner's name (e.g. McFarland). Note: this will be user-visible and *not translated*.",
max_length=40,
),
),
migrations.AlterField(
model_name="partner",
name="country_of_residence",
field=models.BooleanField(
default=False,
help_text="Mark as true if this partner requires applicant countries of residence.",
),
),
migrations.AlterField(
model_name="partner",
name="description",
field=models.TextField(
blank=True,
help_text="Optional description of this partner's resources.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="description_en",
field=models.TextField(
blank=True,
help_text="Optional description of this partner's resources.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="description_fi",
field=models.TextField(
blank=True,
help_text="Optional description of this partner's resources.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="description_fr",
field=models.TextField(
blank=True,
help_text="Optional description of this partner's resources.",
null=True,
),
),
migrations.AlterField(
model_name="partner",
name="real_name",
field=models.BooleanField(
default=False,
help_text="Mark as true if this partner requires applicant names.",
),
),
migrations.AlterField(
model_name="partner",
name="specific_stream",
field=models.BooleanField(
default=False,
help_text="Mark as true if this partner requires applicants to specify the database they want to access.",
),
),
migrations.AlterField(
model_name="partner",
name="specific_title",
field=models.BooleanField(
default=False,
help_text="Mark as true if this partner requires applicants to specify the title they want to access.",
),
),
migrations.AlterField(
model_name="partner",
name="terms_of_use",
field=models.URLField(
blank=True,
help_text="Link to terms of use. Required if users must agree to terms of use to get access; optional otherwise.",
null=True,
),
),
migrations.AlterField(
model_name="stream",
name="description",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AlterField(
model_name="stream",
name="name",
field=models.CharField(
help_text="Name of stream (e.g. 'Health and Behavioral Sciences). Will be user-visible and *not translated*. Do not include the name of the partner here.",
max_length=50,
),
),
migrations.AddField(
model_name="stream",
name="description_en",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_fi",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_fr",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_en",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_fi",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_fr",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.RemoveField(
model_name="partner",
name="logo_url",
),
# One uploaded logo image per Partner, split out into its own table.
migrations.CreateModel(
    name="PartnerLogo",
    fields=[
        (
            "id",
            models.AutoField(
                auto_created=True,
                primary_key=True,
                serialize=False,
                verbose_name="ID",
            ),
        ),
        (
            "logo",
            models.ImageField(
                blank=True,
                help_text="Optional image file that can be used to represent this partner.",
                null=True,
                # NOTE(review): b"" is a Python 2 era bytestring left by the
                # original autogenerated migration; presumably "" in the
                # current model — confirm there, but leave this as-is.
                upload_to=b"",
            ),
        ),
        (
            "partner",
            # NOTE(review): plural related_name "logos" on a OneToOneField
            # is odd but harmless; renaming belongs in the model, not here.
            models.OneToOneField(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="logos",
                to="resources.Partner",
            ),
        ),
    ],
),
migrations.AddField(
model_name="partner",
name="bundle",
field=models.BooleanField(
default=False,
null=True,
help_text="Is this partner a part of the Bundle?",
),
),
migrations.AddField(
model_name="partner",
name="featured",
field=models.BooleanField(
default=False,
help_text="Mark as true to feature this partner on the front page.",
),
),
migrations.AddField(
model_name="partner",
name="account_email",
field=models.BooleanField(
default=False,
help_text="Mark as true if this partner requires applicants to have already signed up at the partner website.",
),
),
migrations.AddField(
model_name="partner",
name="registration_url",
field=models.URLField(
blank=True,
help_text="Link to registration page. Required if users must sign up on the partner's website in advance; optional otherwise.",
null=True,
),
),
migrations.AlterModelManagers(
name="partner",
managers=[
("even_not_available", django.db.models.manager.Manager()),
],
),
migrations.AlterField(
model_name="language",
name="language",
field=models.CharField(
choices=[
("af", "Afrikaans"),
("ar", "Arabic"),
("ast", "Asturian"),
("az", "Azerbaijani"),
("bg", "Bulgarian"),
("be", "Belarusian"),
("bn", "Bengali"),
("br", "Breton"),
("bs", "Bosnian"),
("ca", "Catalan"),
("cs", "Czech"),
("cy", "Welsh"),
("da", "Danish"),
("de", "German"),
("dsb", "Lower Sorbian"),
("el", "Greek"),
("en", "English"),
("en-au", "Australian English"),
("en-gb", "British English"),
("eo", "Esperanto"),
("es", "Spanish"),
("es-ar", "Argentinian Spanish"),
("es-co", "Colombian Spanish"),
("es-mx", "Mexican Spanish"),
("es-ni", "Nicaraguan Spanish"),
("es-ve", "Venezuelan Spanish"),
("et", "Estonian"),
("eu", "Basque"),
("fa", "Persian"),
("fi", "Finnish"),
("fr", "French"),
("fy", "Frisian"),
("ga", "Irish"),
("gd", "Scottish Gaelic"),
("gl", "Galician"),
("he", "Hebrew"),
("hi", "Hindi"),
("hr", "Croatian"),
("hsb", "Upper Sorbian"),
("hu", "Hungarian"),
("ia", "Interlingua"),
("id", "Indonesian"),
("io", "Ido"),
("is", "Icelandic"),
("it", "Italian"),
("ja", "Japanese"),
("ka", "Georgian"),
("kk", "Kazakh"),
("km", "Khmer"),
("kn", "Kannada"),
("ko", "Korean"),
("lb", "Luxembourgish"),
("lt", "Lithuanian"),
("lv", "Latvian"),
("mk", "Macedonian"),
("ml", "Malayalam"),
("mn", "Mongolian"),
("mr", "Marathi"),
("my", "Burmese"),
("nb", "Norwegian Bokmål"),
("ne", "Nepali"),
("nl", "Dutch"),
("nn", "Norwegian Nynorsk"),
("os", "Ossetic"),
("pa", "Punjabi"),
("pl", "Polish"),
("pt", "Portuguese"),
("pt-br", "Brazilian Portuguese"),
("ro", "Romanian"),
("ru", "Russian"),
("sk", "Slovak"),
("sl", "Slovenian"),
("sq", "Albanian"),
("sr", "Serbian"),
("sr-latn", "Serbian Latin"),
("sv", "Swedish"),
("sw", "Swahili"),
("ta", "Tamil"),
("te", "Telugu"),
("th", "Thai"),
("tr", "Turkish"),
("tt", "Tatar"),
("udm", "Udmurt"),
("uk", "Ukrainian"),
("ur", "Urdu"),
("vi", "Vietnamese"),
("zh-hans", "Simplified Chinese"),
("zh-hant", "Traditional Chinese"),
],
max_length=8,
unique=True,
validators=[TWLight.resources.models.validate_language_code],
),
),
migrations.AddField(
model_name="partner",
name="company_location",
field=django_countries.fields.CountryField(
help_text="Partner's primary location.", max_length=2, null=True
),
),
migrations.CreateModel(
name="TaggedTextField",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"object_id",
models.IntegerField(db_index=True, verbose_name="Object id"),
),
(
"content_type",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="resources_taggedtextfield_tagged_items",
to="contenttypes.ContentType",
verbose_name="Content type",
),
),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="TextFieldTag",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.TextField(max_length=100, verbose_name="Name")),
(
"name_en",
models.TextField(max_length=100, null=True, verbose_name="Name"),
),
(
"name_fi",
models.TextField(max_length=100, null=True, verbose_name="Name"),
),
(
"name_fr",
models.TextField(max_length=100, null=True, verbose_name="Name"),
),
(
"slug",
models.SlugField(max_length=100, unique=True, verbose_name="Slug"),
),
],
options={
"verbose_name": "Tag",
"verbose_name_plural": "Tags",
},
),
migrations.AlterField(
    model_name="partner",
    name="tags",
    # NOTE(review): this definition is identical to the earlier AddField
    # for partner.tags — effectively a no-op retained from the squashed
    # history (the field definition round-tripped at some point). Safe to
    # leave in place; removing it would change the recorded state.
    field=models.TextField(
        blank=True,
        null=True,
        default=None,
        help_text="A comma-separated list of tags.",
        verbose_name="Tags",
    ),
),
migrations.AddField(
model_name="taggedtextfield",
name="tag",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="resources_taggedtextfield_items",
to="resources.TextFieldTag",
),
),
migrations.AddField(
model_name="partner",
name="old_tags",
field=models.TextField(
blank=True,
null=True,
default=None,
help_text="A comma-separated list of tags.",
verbose_name="Old Tags",
),
),
migrations.AlterField(
model_name="language",
name="language",
field=models.CharField(
choices=[
("af", "Afrikaans"),
("ar", "العربية"),
("ast", "asturianu"),
("az", "az-latn"),
("be", "беларуская"),
("bg", "български"),
("bn", "বাংলা"),
("br", "brezhoneg"),
("bs", "bosanski"),
("ca", "català"),
("cs", "čeština"),
("cy", "Cymraeg"),
("da", "dansk"),
("de", "Deutsch"),
("dsb", "dolnoserbski"),
("el", "Ελληνικά"),
("en", "English"),
("en-gb", "British English"),
("eo", "Esperanto"),
("es", "español"),
("es-ni", "español nicaragüense"),
("et", "eesti"),
("eu", "euskara"),
("fa", "فارسی"),
("fi", "suomi"),
("fr", "français"),
("fy", "Frysk"),
("ga", "Gaeilge"),
("gd", "Gàidhlig"),
("gl", "galego"),
("he", "עברית"),
("hi", "हिन्दी"),
("hr", "hrvatski"),
("hsb", "hornjoserbsce"),
("hu", "magyar"),
("ia", "interlingua"),
("id", "Bahasa Indonesia"),
("io", "Ido"),
("is", "íslenska"),
("it", "italiano"),
("ja", "日本語"),
("ka", "ქართული"),
("kk", "kk-cyrl"),
("km", "ភាសាខ្មែរ"),
("kn", "ಕನ್ನಡ"),
("ko", "한국어"),
("lb", "Lëtzebuergesch"),
("lt", "lietuvių"),
("lv", "latviešu"),
("mk", "македонски"),
("ml", "മലയാളം"),
("mn", "монгол"),
("mr", "मराठी"),
("my", "မြန်မာဘာသာ"),
("nb", "norsk (bokmål)"),
("ne", "नेपाली"),
("nl", "Nederlands"),
("nn", "norsk (nynorsk)"),
("os", "Ирон"),
("pa", "pa-guru"),
("pl", "polski"),
("pt", "português"),
("pt-br", "português do Brasil"),
("ro", "română"),
("ru", "русский"),
("sk", "slovenčina"),
("sl", "slovenščina"),
("sq", "shqip"),
("sr", "sr-cyrl"),
("sr-latn", "srpski"),
("sv", "svenska"),
("sw", "Kiswahili"),
("ta", "தமிழ்"),
("te", "తెలుగు"),
("th", "ไทย"),
("tr", "Türkçe"),
("tt", "татарча"),
("udm", "удмурт"),
("uk", "українська"),
("ur", "اردو"),
("vi", "Tiếng Việt"),
("zh-hans", "中文(简体)"),
("zh-hant", "中文(繁體)"),
],
max_length=8,
unique=True,
validators=[TWLight.resources.models.validate_language_code],
),
),
migrations.AlterField(
model_name="partner",
name="coordinator",
field=models.ForeignKey(
blank=True,
help_text="The coordinator for this Partner, if any.",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_ar",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_ar",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_ar",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AlterField(
model_name="partner",
name="status",
field=models.IntegerField(
choices=[(0, "Available"), (1, "Not available"), (2, "Waitlisted")],
default=1,
help_text="Should this Partner be displayed to users? Is it open for applications right now?",
),
),
migrations.AddField(
model_name="partner",
name="excerpt_limit",
field=models.PositiveSmallIntegerField(
blank=True,
help_text="Optional excerpt limit in terms of number of words per article. Leave empty if no limit.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="excerpt_limit_percentage",
field=models.PositiveSmallIntegerField(
blank=True,
help_text="Optional excerpt limit in terms of percentage (%) of an article. Leave empty if no limit.",
null=True,
validators=[django.core.validators.MaxValueValidator(100)],
),
),
migrations.RenameField(
model_name="partner",
old_name="description",
new_name="short_description",
),
migrations.RenameField(
model_name="partner",
old_name="description_en",
new_name="short_description_en",
),
migrations.RenameField(
model_name="partner",
old_name="description_fi",
new_name="short_description_fi",
),
migrations.RenameField(
model_name="partner",
old_name="description_fr",
new_name="short_description_fr",
),
migrations.AddField(
model_name="partner",
name="description",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_ar",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_en",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_fi",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_fr",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AlterField(
model_name="partner",
name="short_description",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_ar",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AlterField(
model_name="partner",
name="short_description_en",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AlterField(
model_name="partner",
name="short_description_fi",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AlterField(
model_name="partner",
name="short_description_fr",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
    model_name="partner",
    name="accounts_available",
    field=models.PositiveSmallIntegerField(
        blank=True,
        # NOTE(review): "reseting" (sic) — the typo originates in the
        # model's help_text. Fix it in models.py plus a fresh migration,
        # never by editing this historical string.
        help_text="Add number of new accounts to the existing value, not by reseting it to zero.",
        null=True,
    ),
),
migrations.AddField(
    model_name="stream",
    name="accounts_available",
    field=models.PositiveSmallIntegerField(
        blank=True,
        # Same "reseting" typo as partner.accounts_available above.
        help_text="Add number of new accounts to the existing value, not by reseting it to zero.",
        null=True,
    ),
),
migrations.AddField(
model_name="partner",
name="description_ko",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_mk",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_my",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_pt",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_pt_br",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_sv",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_tr",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_zh_hans",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_zh_hant",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_ko",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_mk",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_my",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_pt",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_pt_br",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_sv",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_tr",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_zh_hans",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_zh_hant",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_ko",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_mk",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_my",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_pt",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_pt_br",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_sv",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_tr",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_zh_hans",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_zh_hant",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_ko",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_mk",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_my",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_pt",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_pt_br",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_sv",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_tr",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_zh_hans",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_zh_hant",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_ko",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="textfieldtag",
name="name_mk",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="textfieldtag",
name="name_my",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="textfieldtag",
name="name_pt",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="textfieldtag",
name="name_pt_br",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="textfieldtag",
name="name_sv",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="textfieldtag",
name="name_tr",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="textfieldtag",
name="name_zh_hans",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="textfieldtag",
name="name_zh_hant",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="partner",
name="description_da",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_da",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_da",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_da",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_da",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.CreateModel(
name="Video",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"tutorial_video_url",
models.URLField(
blank=True, help_text="URL of a video tutorial.", null=True
),
),
(
"partner",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="videos",
to="resources.Partner",
),
),
],
options={
"ordering": ["partner"],
"verbose_name": "video tutorial",
"verbose_name_plural": "video tutorials",
},
),
migrations.AddField(
model_name="partner",
name="description_br",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_br",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_br",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_br",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_br",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="partner",
name="description_de",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_de",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_de",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_de",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_de",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.CreateModel(
name="Suggestion",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"suggested_company_name",
models.CharField(
help_text="Potential partner's name (e.g. McFarland).",
max_length=40,
),
),
(
"description",
models.TextField(
blank=True,
help_text="Optional description of this potential partner.",
max_length=1000,
),
),
(
"company_url",
models.URLField(
blank=True,
help_text="Link to the potential partner's website.",
null=True,
),
),
(
"author",
models.ForeignKey(
blank=True,
help_text="User who authored this suggestion.",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="suggestion_author",
to=settings.AUTH_USER_MODEL,
),
),
(
"upvoted_users",
models.ManyToManyField(
blank=True,
help_text="Users who have upvoted this suggestion.",
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"ordering": ["suggested_company_name"],
"verbose_name": "suggestion",
"verbose_name_plural": "suggestions",
},
),
migrations.AddField(
model_name="partner",
name="description_fa",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_fa",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_fa",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_fa",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_fa",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="partner",
name="description_ru",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_ta",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_ru",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_ta",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_ru",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_ta",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_ru",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_ta",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_ru",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="textfieldtag",
name="name_ta",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="partner",
name="description_hi",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="description_mr",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name=b"long description",
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_hi",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_mr",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_hi",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_mr",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_hi",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_mr",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_hi",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="textfieldtag",
name="name_mr",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.CreateModel(
name="AccessCode",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"code",
models.CharField(
help_text="An access code for this partner.", max_length=60
),
),
],
options={
"verbose_name": "access code",
"verbose_name_plural": "access codes",
},
),
migrations.RemoveField(
model_name="partner",
name="bundle",
),
migrations.AddField(
model_name="partner",
name="authorization_method",
field=models.IntegerField(
choices=[
(0, "Email"),
(1, "Access codes"),
(2, "Proxy"),
(3, "Library Bundle"),
],
default=0,
help_text="Which authorization method does this partner use? 'Email' means the accounts are set up via email, and is the default. Select 'Access Codes' if we send individual, or group, login details or access codes. 'Proxy' means access delivered directly via EZProxy, and Library Bundle is automated proxy-based access.",
),
),
]
| 38.452913
| 362
| 0.45758
| 7,426
| 87,788
| 5.300431
| 0.102478
| 0.03727
| 0.071289
| 0.083687
| 0.820152
| 0.808059
| 0.78405
| 0.770458
| 0.754783
| 0.750946
| 0
| 0.006158
| 0.439491
| 87,788
| 2,282
| 363
| 38.469763
| 0.793196
| 0.011084
| 0
| 0.777036
| 1
| 0.018692
| 0.273157
| 0.005036
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00178
| false
| 0
| 0.00356
| 0
| 0.006676
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
76f62590f904ac02ec22387ebed471484f1497fd
| 4,533
|
py
|
Python
|
tests/core/test_datamodules.py
|
stas00/pytorch-lightning
|
84c507c4df5f5c336deb19ce7f70fa02329f39f6
|
[
"Apache-2.0"
] | 1
|
2021-06-10T07:12:58.000Z
|
2021-06-10T07:12:58.000Z
|
tests/core/test_datamodules.py
|
stas00/pytorch-lightning
|
84c507c4df5f5c336deb19ce7f70fa02329f39f6
|
[
"Apache-2.0"
] | null | null | null |
tests/core/test_datamodules.py
|
stas00/pytorch-lightning
|
84c507c4df5f5c336deb19ce7f70fa02329f39f6
|
[
"Apache-2.0"
] | null | null | null |
import pickle
import torch
import pytest
from pytorch_lightning import Trainer
from tests.base.datamodules import TrialMNISTDataModule
from tests.base import EvalModelTemplate
from argparse import ArgumentParser
def test_base_datamodule(tmpdir):
dm = TrialMNISTDataModule()
dm.prepare_data()
dm.setup()
def test_dm_add_argparse_args(tmpdir):
parser = ArgumentParser()
parser = TrialMNISTDataModule.add_argparse_args(parser)
args = parser.parse_args(['--data_dir', './my_data'])
assert args.data_dir == './my_data'
def test_dm_init_from_argparse_args(tmpdir):
parser = ArgumentParser()
parser = TrialMNISTDataModule.add_argparse_args(parser)
args = parser.parse_args(['--data_dir', './my_data'])
dm = TrialMNISTDataModule.from_argparse_args(args)
dm.prepare_data()
dm.setup()
def test_dm_pickle_after_init(tmpdir):
dm = TrialMNISTDataModule()
pickle.dumps(dm)
def test_dm_pickle_after_setup(tmpdir):
dm = TrialMNISTDataModule()
dm.prepare_data()
dm.setup()
pickle.dumps(dm)
def test_train_loop_only(tmpdir):
dm = TrialMNISTDataModule(tmpdir)
dm.prepare_data()
dm.setup()
model = EvalModelTemplate()
model.validation_step = None
model.validation_step_end = None
model.validation_epoch_end = None
model.test_step = None
model.test_step_end = None
model.test_epoch_end = None
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=3,
weights_summary=None,
)
trainer.fit(model, dm)
# fit model
result = trainer.fit(model)
assert result == 1
assert trainer.callback_metrics['loss'] < 0.50
def test_train_val_loop_only(tmpdir):
dm = TrialMNISTDataModule(tmpdir)
dm.prepare_data()
dm.setup()
model = EvalModelTemplate()
model.validation_step = None
model.validation_step_end = None
model.validation_epoch_end = None
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=3,
weights_summary=None,
)
trainer.fit(model, dm)
# fit model
result = trainer.fit(model)
assert result == 1
assert trainer.callback_metrics['loss'] < 0.50
def test_full_loop(tmpdir):
dm = TrialMNISTDataModule(tmpdir)
dm.prepare_data()
dm.setup()
model = EvalModelTemplate()
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=3,
weights_summary=None,
)
trainer.fit(model, dm)
# fit model
result = trainer.fit(model)
assert result == 1
# test
result = trainer.test(datamodule=dm)
result = result[0]
assert result['test_acc'] > 0.8
@pytest.mark.skipif(torch.cuda.device_count() < 1, reason="test requires multi-GPU machine")
def test_full_loop_single_gpu(tmpdir):
dm = TrialMNISTDataModule(tmpdir)
dm.prepare_data()
dm.setup()
model = EvalModelTemplate()
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=3,
weights_summary=None,
gpus=1
)
trainer.fit(model, dm)
# fit model
result = trainer.fit(model)
assert result == 1
# test
result = trainer.test(datamodule=dm)
result = result[0]
assert result['test_acc'] > 0.8
@pytest.mark.skipif(torch.cuda.device_count() < 2, reason="test requires multi-GPU machine")
def test_full_loop_dp(tmpdir):
dm = TrialMNISTDataModule(tmpdir)
dm.prepare_data()
dm.setup()
model = EvalModelTemplate()
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=3,
weights_summary=None,
distributed_backend='dp',
gpus=2
)
trainer.fit(model, dm)
# fit model
result = trainer.fit(model)
assert result == 1
# test
result = trainer.test(datamodule=dm)
result = result[0]
assert result['test_acc'] > 0.8
@pytest.mark.skipif(torch.cuda.device_count() < 2, reason="test requires multi-GPU machine")
def test_full_loop_ddp_spawn(tmpdir):
import os
os.environ['CUDA_VISIBLE_DEVICES'] = '0,1'
dm = TrialMNISTDataModule(tmpdir)
dm.prepare_data()
dm.setup()
model = EvalModelTemplate()
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=3,
weights_summary=None,
distributed_backend='ddp_spawn',
gpus=[0, 1]
)
trainer.fit(model, dm)
# fit model
result = trainer.fit(model)
assert result == 1
# test
result = trainer.test(datamodule=dm)
result = result[0]
assert result['test_acc'] > 0.8
| 22.893939
| 92
| 0.668652
| 560
| 4,533
| 5.205357
| 0.148214
| 0.0494
| 0.06175
| 0.046312
| 0.840137
| 0.81235
| 0.81235
| 0.81235
| 0.766381
| 0.766381
| 0
| 0.011121
| 0.22634
| 4,533
| 197
| 93
| 23.010152
| 0.820074
| 0.017428
| 0
| 0.739437
| 0
| 0
| 0.048166
| 0
| 0
| 0
| 0
| 0
| 0.091549
| 1
| 0.077465
| false
| 0
| 0.056338
| 0
| 0.133803
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a0919ea1dc6cbcecf7715d2ae433886c80b4a21
| 166
|
py
|
Python
|
tools/leetcode.242.Valid Anagram/leetcode.242.Valid Anagram.submission1.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | 4
|
2015-10-10T00:30:55.000Z
|
2020-07-27T19:45:54.000Z
|
tools/leetcode.242.Valid Anagram/leetcode.242.Valid Anagram.submission1.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | null | null | null |
tools/leetcode.242.Valid Anagram/leetcode.242.Valid Anagram.submission1.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | null | null | null |
class Solution:
# @param {string} s
# @param {string} t
# @return {boolean}
def isAnagram(self, s, t):
return sorted(list(s))==sorted(list(t))
| 166
| 166
| 0.578313
| 22
| 166
| 4.363636
| 0.590909
| 0.229167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.253012
| 166
| 1
| 166
| 166
| 0.774194
| 0.319277
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
0a0e50bc4018f79233f5194287cbf1c61b8529bc
| 323
|
py
|
Python
|
ipython/startup/import_beartype.py
|
dycw/dotfiles2
|
9e23c4989e9813080da3658a8f98dbb1e03776f2
|
[
"MIT"
] | null | null | null |
ipython/startup/import_beartype.py
|
dycw/dotfiles2
|
9e23c4989e9813080da3658a8f98dbb1e03776f2
|
[
"MIT"
] | null | null | null |
ipython/startup/import_beartype.py
|
dycw/dotfiles2
|
9e23c4989e9813080da3658a8f98dbb1e03776f2
|
[
"MIT"
] | null | null | null |
from contextlib import suppress
with suppress(ModuleNotFoundError):
from beartype import beartype # noqa: F401
from beartype.vale import Is # noqa: F401
from beartype.vale import IsAttr # noqa: F401
from beartype.vale import IsEqual # noqa: F401
from beartype.vale import IsSubclass # noqa: F401
| 32.3
| 54
| 0.736842
| 41
| 323
| 5.804878
| 0.341463
| 0.252101
| 0.201681
| 0.336134
| 0.504202
| 0.504202
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.210526
| 323
| 9
| 55
| 35.888889
| 0.87451
| 0.167183
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.857143
| 0
| 0.857143
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6a537b1dd1a64dc5b4d65f89403140fc05d38c87
| 65
|
py
|
Python
|
collection/2.py
|
nemero/py_neural
|
87f151097f8c331a06f13b96c4cec9a1ee663abf
|
[
"MIT"
] | null | null | null |
collection/2.py
|
nemero/py_neural
|
87f151097f8c331a06f13b96c4cec9a1ee663abf
|
[
"MIT"
] | 1
|
2017-01-18T18:35:03.000Z
|
2017-01-25T08:55:49.000Z
|
collection/2.py
|
nemero/py_neural
|
87f151097f8c331a06f13b96c4cec9a1ee663abf
|
[
"MIT"
] | null | null | null |
0 0 0 1 0
1 1 1 1
0 0 0 1
0 0 0 1
0 1 1 0
1 0 0 0
1 0 0 0
1 1 1 1
| 8.125
| 9
| 0.507692
| 33
| 65
| 1
| 0.060606
| 0.606061
| 0.454545
| 0.606061
| 1
| 0.878788
| 0.878788
| 0.545455
| 0.545455
| 0
| 0
| 1
| 0.492308
| 65
| 8
| 10
| 8.125
| 0
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
6a5e4c6e6126accfbbf8bdd30ff250a594535a73
| 25,200
|
py
|
Python
|
rlschool/liftsim/environment/mansion/test_elevator.py
|
ANCL/QuadPPO
|
b7ed0574467bd321f4259175621a12ff7aeb7d12
|
[
"MIT"
] | 169
|
2019-07-15T02:23:54.000Z
|
2021-10-31T08:31:19.000Z
|
rlschool/liftsim/environment/mansion/test_elevator.py
|
ANCL/QuadPPO
|
b7ed0574467bd321f4259175621a12ff7aeb7d12
|
[
"MIT"
] | 23
|
2019-07-22T05:11:13.000Z
|
2021-11-01T04:53:39.000Z
|
rlschool/liftsim/environment/mansion/test_elevator.py
|
ANCL/QuadPPO
|
b7ed0574467bd321f4259175621a12ff7aeb7d12
|
[
"MIT"
] | 48
|
2019-07-15T02:21:42.000Z
|
2021-09-16T06:40:51.000Z
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit test class
"""
from rlschool.liftsim.environment.mansion.person_generators.uniform_generator import UniformPersonGenerator
from rlschool.liftsim.environment.mansion import person_generators
from rlschool.liftsim.environment.mansion.person_generators import uniform_generator
from rlschool.liftsim.environment.mansion.utils import PersonType, MansionState, ElevatorState, ElevatorAction
from rlschool.liftsim.environment.mansion.elevator import Elevator
from rlschool.liftsim.environment.mansion.mansion_manager import MansionManager
from rlschool.liftsim.environment.mansion.mansion_config import MansionConfig
import sys
import unittest
import mock
class TestElevator(unittest.TestCase):
# checked
# @unittest.skip("test")
@mock.patch("person_generators.uniform_generator.UniformPersonGenerator")
def test_door_load_unload(self, mock_uniformgenerator):
"""
stop at the target, load and unload corresponding passengers, open and close the door properly
"""
max_floors = 8
world = MansionConfig(
dt=0.50,
number_of_floors=max_floors,
floor_height=4.0)
test_elevator = Elevator(start_position=0.0,
mansion_config=world,
name="test_elevator")
test_elevator._direction = 1
test_elevator._current_position = 8.0
test_elevator._target_floors = [3, 5]
test_elevator._loaded_person = [
list() for i in range(
test_elevator._number_of_floors)]
test_elevator._loaded_person[2].append(
PersonType(6, 40, 1, 3, world.raw_time))
test_elevator._loaded_person[4].append(
PersonType(7, 35, 1, 5, world.raw_time))
test_elevator._load_weight = 80
tmp_uniform_generator = UniformPersonGenerator()
ret_person = []
ret_person.append(PersonType(0, 50, 3, 5, world.raw_time))
ret_person.append(PersonType(1, 30, 3, 1, world.raw_time))
ret_person.append(PersonType(2, 60, 6, 4, world.raw_time))
ret_person.append(PersonType(4, 55, 3, 4, world.raw_time))
ret_person.append(PersonType(5, 65, 3, 6, world.raw_time))
person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
return_value=(ret_person))
test_mansion = MansionManager(
elevator_number=1,
person_generator=tmp_uniform_generator,
mansion_config=world,
name="test_mansion"
)
test_mansion._elevators = [test_elevator]
dispatch = []
dispatch.append(ElevatorAction(3, 1))
test_mansion.run_mansion(dispatch)
# print(test_mansion.state, "\nworld time is", world.raw_time)
state = test_mansion.state
self.assertAlmostEqual(state.ElevatorStates[0].DoorState, 0.5)
# mock generate_person again
ret_person = []
person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
return_value=(ret_person))
test_mansion.run_mansion(dispatch) # Door fully open, t = 1.0
state = test_mansion.state
self.assertAlmostEqual(state.ElevatorStates[0].DoorState, 1.0)
for i in range(4):
test_mansion.run_mansion(dispatch)
# print(test_mansion.state, "\nworld time is", world.raw_time)
state = test_mansion.state # passenger 6 is unloaded, t = 3.0
self.assertAlmostEqual(state.ElevatorStates[0].LoadWeight, 40)
dispatch = []
dispatch.append(ElevatorAction(0, 0))
for i in range(4):
test_mansion.run_mansion(dispatch)
# print(test_mansion.state, "\nworld time is", world.raw_time)
state = test_mansion.state # passenger 0 and 4 are loaded, t = 5.0
self.assertAlmostEqual(state.ElevatorStates[0].LoadWeight, 145)
for i in range(4):
test_mansion.run_mansion(dispatch)
state = test_mansion.state # passenger 5 is loaded, t = 7.0
self.assertAlmostEqual(state.ElevatorStates[0].LoadWeight, 210)
for i in range(4):
test_mansion.run_mansion(dispatch)
# print(test_mansion.state, "\nworld time is", world.raw_time)
state = test_mansion.state # the door is closed, going up, t = 9.0
self.assertAlmostEqual(state.ElevatorStates[0].Velocity, 1.0)
# checked
# @unittest.skip("test")
@mock.patch("person_generators.uniform_generator.UniformPersonGenerator")
def test_overload(self, mock_uniformgenerator):
"""
overload, two people enter together, check who can enter the elevator one by one
after overload, if the dispatcher still dispatches the elevator to the current floor, ignore the dispatch
"""
max_floors = 8
world = MansionConfig(
dt=0.50,
number_of_floors=max_floors,
floor_height=4.0)
test_elevator = Elevator(start_position=0.0,
mansion_config=world,
name="test_elevator")
test_elevator._direction = 1
test_elevator._current_position = 8.0
test_elevator._target_floors = [5]
test_elevator._loaded_person = [
list() for i in range(
test_elevator._number_of_floors)]
test_elevator._loaded_person[5].append(
PersonType(6, 750, 1, 6, world.raw_time))
test_elevator._loaded_person[7].append(
PersonType(7, 750, 1, 8, world.raw_time))
test_elevator._load_weight = 1500
tmp_uniform_generator = UniformPersonGenerator()
ret_person = []
ret_person.append(PersonType(0, 150, 3, 5, world.raw_time))
ret_person.append(PersonType(1, 50, 3, 1, world.raw_time))
ret_person.append(PersonType(2, 60, 5, 6, world.raw_time))
ret_person.append(PersonType(4, 65, 3, 8, world.raw_time))
ret_person.append(PersonType(5, 65, 3, 6, world.raw_time))
person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
return_value=(ret_person))
test_mansion = MansionManager(
elevator_number=1,
person_generator=tmp_uniform_generator,
mansion_config=world,
name="test_mansion"
)
test_mansion._elevators = [test_elevator]
dispatch = []
dispatch.append(ElevatorAction(3, 1))
test_mansion.run_mansion(dispatch)
# mock generate_person again
ret_person = []
person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
return_value=(ret_person))
test_mansion.run_mansion(dispatch) # Door fully open, t = 1.0
dispatch = []
dispatch.append(ElevatorAction(-1, 0))
for i in range(4):
test_mansion.run_mansion(dispatch) # upload person 4, t = 3.0
dispatch = []
dispatch.append(ElevatorAction(3, 1))
test_mansion.run_mansion(dispatch)
state = test_mansion.state
self.assertAlmostEqual(state.ElevatorStates[0].LoadWeight, 1565)
test_mansion.run_mansion(dispatch)
test_mansion.run_mansion(dispatch) # t = 4.5
state = test_mansion.state
self.assertGreater(state.ElevatorStates[0].Velocity, 0.0)
# print(test_mansion.state, "\nworld time is", world.raw_time)
# checked
# @unittest.skip("test")
@mock.patch("person_generators.uniform_generator.UniformPersonGenerator")
def test_stop_at_dispatch(self, mock_uniformgenerator):
"""
stop at the dispatch floor, open and close the door, then keep going to the target floor
"""
max_floors = 8
# mansion_config
world = MansionConfig(
dt=0.50,
number_of_floors=max_floors,
floor_height=4.0)
# test_elevator
test_elevator = Elevator(start_position=0.0,
mansion_config=world,
name="test_elevator")
test_elevator._direction = 1
test_elevator._current_velocity = 2.0
test_elevator._current_position = 4.0 # currently at 2 floor
test_elevator._target_floors = [5]
test_elevator._loaded_person = [
list() for i in range(
test_elevator._number_of_floors)]
test_elevator._loaded_person[5].append(
PersonType(0, 50, 1, 6, world.raw_time))
test_elevator._load_weight = 50
# test_mansion
tmp_uniform_generator = UniformPersonGenerator()
ret_person = []
person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
return_value=(ret_person))
test_mansion = MansionManager(
elevator_number=1,
person_generator=tmp_uniform_generator,
mansion_config=world,
name="test_mansion"
)
test_mansion._elevators = [test_elevator]
# test
dispatch = []
dispatch.append(ElevatorAction(3, 1))
for i in range(7):
test_mansion.run_mansion(dispatch) # stop at the dispatched floor
# print(test_mansion.state, "\nworld time is", world.raw_time)
# dispatch = []
# dispatch.append(ElevatorAction(-1, 0))
for i in range(2):
# the door is fully open, t = 4.5
test_mansion.run_mansion(dispatch)
state = test_mansion.state
self.assertAlmostEqual(state.ElevatorStates[0].DoorState, 1.0)
dispatch = []
dispatch.append(ElevatorAction(0, 0))
for i in range(6):
# finish time open lag and close the door
test_mansion.run_mansion(dispatch)
state = test_mansion.state
self.assertAlmostEqual(state.ElevatorStates[0].DoorState, 0.0)
for i in range(4):
test_mansion.run_mansion(dispatch) # then keep going up
state = test_mansion.state
self.assertAlmostEqual(state.ElevatorStates[0].Velocity, 2.0)
# checked
# @unittest.skip("test")
@mock.patch("person_generators.uniform_generator.UniformPersonGenerator")
def test_dispatch_when_closing(self, mock_uniformgenerator):
    """Re-dispatching the current floor while the door is closing
    reopens the door and loads the newly-arrived passengers.
    """
    max_floors = 8
    # mansion_config: 8 floors, 4 m per floor, 0.5 s timestep.
    world = MansionConfig(
        dt=0.50,
        number_of_floors=max_floors,
        floor_height=4.0)
    # test_elevator: at 8.0 m (floor 3) heading up, carrying two riders
    # (40 kg each) bound for floors 4 and 5.
    test_elevator = Elevator(start_position=0.0,
                             mansion_config=world,
                             name="test_elevator")
    test_elevator._direction = 1
    test_elevator._current_position = 8.0
    test_elevator._target_floors = [4, 5]
    test_elevator._loaded_person = [
        list() for i in range(
            test_elevator._number_of_floors)]
    test_elevator._loaded_person[3].append(
        PersonType(6, 40, 1, 4, world.raw_time))
    test_elevator._loaded_person[4].append(
        PersonType(7, 40, 1, 5, world.raw_time))
    test_elevator._load_weight = 80
    # test_mansion: three people appear on the first tick (person 0 at
    # floor 3 is the one this elevator can pick up going upward).
    # NOTE(review): assigning generate_person on the class bypasses the
    # @mock.patch decorator and leaks across tests -- verify intentional.
    tmp_uniform_generator = UniformPersonGenerator()
    ret_person = []
    ret_person.append(PersonType(0, 50, 3, 5, world.raw_time))
    ret_person.append(PersonType(1, 50, 3, 1, world.raw_time))
    ret_person.append(PersonType(2, 60, 6, 4, world.raw_time))
    person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
        return_value=(ret_person))
    test_mansion = MansionManager(
        elevator_number=1,
        person_generator=tmp_uniform_generator,
        mansion_config=world,
        name="test_mansion"
    )
    test_mansion._elevators = [test_elevator]
    dispatch = []
    dispatch.append(ElevatorAction(3, 1))
    # run_mansion
    test_mansion.run_mansion(dispatch)
    ret_person = []
    person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
        return_value=(ret_person))
    test_mansion.run_mansion(dispatch)  # the door is open, t = 1.0
    dispatch = []
    dispatch.append(ElevatorAction(-1, 0))
    for i in range(4):
        test_mansion.run_mansion(dispatch)  # load person 0, t = 3.0
    # the door is closing, the door state = 0.5, t = 3.5
    test_mansion.run_mansion(dispatch)
    # come two more passengers at floor 3
    ret_person = []
    ret_person.append(PersonType(4, 55, 3, 4, world.raw_time))
    ret_person.append(PersonType(5, 65, 3, 6, world.raw_time))
    person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
        return_value=(ret_person))
    dispatch = []
    dispatch.append(ElevatorAction(3, 1))
    # the door reopens fully: door_state = 1.0, time = 4.0
    test_mansion.run_mansion(dispatch)
    state = test_mansion.state
    self.assertAlmostEqual(state.ElevatorStates[0].DoorState, 1.0)
    ret_person = []
    person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
        return_value=(ret_person))
    dispatch = []
    dispatch.append(ElevatorAction(-1, 0))
    for i in range(4):
        test_mansion.run_mansion(dispatch)  # load the two passengers
    state = test_mansion.state
    # 80 (initial) + 50 (person 0) + 55 + 65 = 250 kg on board.
    self.assertAlmostEqual(state.ElevatorStates[0].LoadWeight, 250)
@mock.patch("person_generators.uniform_generator.UniformPersonGenerator")
def test_dispatch_invalid(self, mock_uniformgenerator):
    """Ignore a dispatch the elevator physically cannot honor (it is
    already passing the dispatched floor at full speed) and decelerate
    only when approaching a real target (exercises the velocity planner).
    """
    max_floors = 8
    # mansion_config
    world = MansionConfig(
        dt=0.50,
        number_of_floors=max_floors,
        floor_height=4.0
    )
    # test_elevator: at 8.0 m (floor 3) moving up at 2 m/s.
    # NOTE(review): "test_eleavtor" is a typo for "test_elevator";
    # harmless here since the name is only a label.
    test_elevator = Elevator(start_position=0.0,
                             mansion_config=world,
                             name="test_eleavtor")
    test_elevator._direction = 1
    test_elevator._current_velocity = 2.0
    test_elevator._current_position = 8.0  # currently at 3 floor
    test_elevator._target_floors = [5, 8]  # target 5 floor
    test_elevator._loaded_person = [
        list() for i in range(
            test_elevator._number_of_floors)]
    test_elevator._loaded_person[4].append(
        PersonType(6, 40, 1, 5, world.raw_time))
    test_elevator._loaded_person[7].append(
        PersonType(7, 40, 1, 8, world.raw_time))
    test_elevator._load_weight = 80
    # test_mansion: no new arrivals.
    tmp_uniform_generator = UniformPersonGenerator()
    ret_person = []
    person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
        return_value=(ret_person))
    test_mansion = MansionManager(
        elevator_number=1,
        person_generator=tmp_uniform_generator,
        mansion_config=world,
        name="test_mansion"
    )
    test_mansion._elevators = [test_elevator]
    # Dispatch floor 3 -- the elevator is already there at speed, so it
    # cannot stop in time and must ignore the request.
    dispatch = []
    dispatch.append(ElevatorAction(3, 1))
    test_mansion.run_mansion(dispatch)
    state = test_mansion.state
    self.assertAlmostEqual(state.ElevatorStates[0].Velocity, 2.0)
    test_mansion.run_mansion(dispatch)
    state = test_mansion.state
    self.assertAlmostEqual(
        state.ElevatorStates[0].Velocity,
        2.0)  # ignore the invalid dispatch
    # Keep stepping until the elevator decelerates and stops at its
    # real target (floor 5).
    for i in range(5):
        test_mansion.run_mansion(dispatch)
    test_mansion.run_mansion(dispatch)
    test_mansion.run_mansion(dispatch)
    test_mansion.run_mansion(dispatch)
    state = test_mansion.state
    self.assertAlmostEqual(state.ElevatorStates[0].Velocity, 0.0)
@mock.patch("person_generators.uniform_generator.UniformPersonGenerator")
def test_no_dispatch(self, mock_uniformgenerator):
    """After unloading at the target floor with no further dispatch,
    the elevator holds still with the door closed and direction 0.
    """
    max_floors = 8
    # mansion_config
    world = MansionConfig(
        dt=0.50,
        number_of_floors=max_floors,
        floor_height=4.0
    )
    # test_elevator: stopped at 8.0 m (floor 3), one rider to unload.
    # NOTE(review): "test_eleavtor" is a typo for "test_elevator";
    # harmless since the name is only a label.
    test_elevator = Elevator(start_position=0.0,
                             mansion_config=world,
                             name="test_eleavtor")
    test_elevator._direction = 1
    test_elevator._current_velocity = 0
    test_elevator._current_position = 8.0  # currently at 3 floor
    test_elevator._target_floors = [3]  # target 3 floor
    test_elevator._loaded_person = [
        list() for i in range(
            test_elevator._number_of_floors)]
    test_elevator._loaded_person[2].append(
        PersonType(0, 40, 1, 3, world.raw_time))
    test_elevator._load_weight = 40
    # test_mansion: no new arrivals.
    tmp_uniform_generator = UniformPersonGenerator()
    ret_person = []
    person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
        return_value=(ret_person))
    test_mansion = MansionManager(
        elevator_number=1,
        person_generator=tmp_uniform_generator,
        mansion_config=world,
        name="test_mansion"
    )
    test_mansion._elevators = [test_elevator]
    # (-1, 0) means no dispatch for this elevator.
    dispatch = []
    dispatch.append(ElevatorAction(-1, 0))
    test_mansion.run_mansion(dispatch)
    test_mansion.run_mansion(dispatch)  # open the door
    for i in range(4):
        test_mansion.run_mansion(dispatch)  # unload person 0
    test_mansion.run_mansion(dispatch)
    test_mansion.run_mansion(dispatch)  # close the door
    test_mansion.run_mansion(dispatch)
    test_mansion.run_mansion(dispatch)
    state = test_mansion.state
    # Idle: door closed, stationary, parked at floor 3, no direction.
    self.assertAlmostEqual(state.ElevatorStates[0].DoorState, 0.0)
    self.assertAlmostEqual(state.ElevatorStates[0].Velocity, 0.0)
    self.assertAlmostEqual(state.ElevatorStates[0].Floor, 3.0)
    self.assertAlmostEqual(state.ElevatorStates[0].Direction, 0)
@mock.patch("person_generators.uniform_generator.UniformPersonGenerator")
def test_dispatch_twice(self, mock_uniformgenerator):
    """No target: dispatch (4, 1) first, then re-dispatch (8, -1).

    The elevator keeps a feasible velocity profile across the
    re-dispatch -- it is still cruising at 2 m/s two steps after the
    second dispatch rather than stopping for the first one.
    """
    max_floors = 8
    # mansion_config
    world = MansionConfig(
        dt=0.50,
        number_of_floors=max_floors,
        floor_height=4.0
    )
    # test_elevator: empty, moving up at 2 m/s from 9.0 m.
    test_elevator = Elevator(start_position=0.0,
                             mansion_config=world,
                             name="test_elevator")
    test_elevator._direction = 1
    test_elevator._current_velocity = 2.0
    test_elevator._current_position = 9.0
    test_elevator._target_floors = list()
    test_elevator._loaded_person = [
        list() for i in range(
            test_elevator._number_of_floors)]
    test_elevator._load_weight = 0
    # mansion: no new arrivals.
    tmp_uniform_generator = UniformPersonGenerator()
    ret_person = []
    person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
        return_value=(ret_person))
    test_mansion = MansionManager(
        elevator_number=1,
        person_generator=tmp_uniform_generator,
        mansion_config=world,
        name="test_mansion"
    )
    test_mansion._elevators = [test_elevator]
    # first, dispatch to floor 4 (upward)
    dispatch = []
    dispatch.append(ElevatorAction(4, 1))
    test_mansion.run_mansion(dispatch)
    test_mansion.run_mansion(dispatch)
    test_mansion.run_mansion(dispatch)
    # then re-dispatch to floor 8 (downward hall call)
    dispatch = []
    dispatch.append(ElevatorAction(8, -1))
    test_mansion.run_mansion(dispatch)
    test_mansion.run_mansion(dispatch)
    state = test_mansion.state
    # Still at cruise speed after the second dispatch.
    self.assertAlmostEqual(state.ElevatorStates[0].Velocity, 2.0)
@mock.patch("person_generators.uniform_generator.UniformPersonGenerator")
def test_cancel_dispatch(self, mock_uniformgenerator):
    """No target: dispatch first (elevator accelerates), then replace
    the dispatch, after which the elevator decelerates to a stop.
    """
    max_floors = 8
    # mansion_config
    world = MansionConfig(
        dt=0.50,
        number_of_floors=max_floors,
        floor_height=4.0
    )
    # test_elevator: idle and empty at 8.0 m (floor 3).
    test_elevator = Elevator(start_position=0.0,
                             mansion_config=world,
                             name="test_elevator")
    test_elevator._direction = 0
    test_elevator._current_velocity = 0.0
    test_elevator._current_position = 8.0
    test_elevator._target_floors = list()
    test_elevator._loaded_person = [
        list() for i in range(
            test_elevator._number_of_floors)]
    test_elevator._load_weight = 0
    # mansion: no new arrivals.
    tmp_uniform_generator = UniformPersonGenerator()
    ret_person = []
    person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
        return_value=(ret_person))
    test_mansion = MansionManager(
        elevator_number=1,
        person_generator=tmp_uniform_generator,
        mansion_config=world,
        name="test_mansion"
    )
    test_mansion._elevators = [test_elevator]
    # Dispatch upward to floor 6; the elevator starts moving.
    dispatch = []
    dispatch.append(ElevatorAction(6, 1))
    test_mansion.run_mansion(dispatch)
    test_mansion.run_mansion(dispatch)  # t = 1.0
    # Replace the dispatch; the elevator winds down to a stop.
    dispatch = []
    dispatch.append(ElevatorAction(0, -1))
    for i in range(10):
        test_mansion.run_mansion(dispatch)
    state = test_mansion.state
    self.assertAlmostEqual(state.ElevatorStates[0].DoorState, 0.0)
    self.assertAlmostEqual(state.ElevatorStates[0].Velocity, 0.0)
@mock.patch("person_generators.uniform_generator.UniformPersonGenerator")
def test_set_direction_0(self, mock_uniformgenerator):
    """A stopped, empty elevator first resets its direction to 0, then
    adopts the dispatched target direction.
    """
    max_floors = 8
    # mansion_config
    world = MansionConfig(
        dt=0.50,
        number_of_floors=max_floors,
        floor_height=4.0
    )
    # test_elevator: empty and stationary at floor 3, stale direction 1.
    test_elevator = Elevator(start_position=0.0,
                             mansion_config=world,
                             name="test_elevator")
    test_elevator._direction = 1
    test_elevator._current_velocity = 0.0
    test_elevator._current_position = 8.0  # 3rd floor
    test_elevator._target_floors = list()
    test_elevator._loaded_person = [
        list() for i in range(
            test_elevator._number_of_floors)]
    test_elevator._load_weight = 0
    # mansion: one person appears at floor 3 wanting to go down to 1.
    tmp_uniform_generator = UniformPersonGenerator()
    ret_person = []
    ret_person.append(PersonType(0, 50, 3, 1, world.raw_time))
    person_generators.uniform_generator.UniformPersonGenerator.generate_person = mock.Mock(
        return_value=(ret_person))
    test_mansion = MansionManager(
        elevator_number=1,
        person_generator=tmp_uniform_generator,
        mansion_config=world,
        name="test_mansion"
    )
    test_mansion._elevators = [test_elevator]
    # Dispatch the current floor with a downward direction.
    dispatch = []
    dispatch.append(ElevatorAction(3, -1))
    test_mansion.run_mansion(dispatch)
    test_mansion.run_mansion(dispatch)  # t = 1.0
    state = test_mansion.state
    # The elevator now reports the dispatched (downward) direction.
    self.assertAlmostEqual(state.ElevatorStates[0].Direction, -1)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 38.414634
| 113
| 0.634921
| 2,823
| 25,200
| 5.405951
| 0.081119
| 0.074962
| 0.041282
| 0.061923
| 0.856694
| 0.837298
| 0.811087
| 0.778389
| 0.771771
| 0.763318
| 0
| 0.026293
| 0.278571
| 25,200
| 655
| 114
| 38.473282
| 0.813146
| 0.133849
| 0
| 0.814103
| 0
| 0
| 0.035152
| 0.024304
| 0
| 0
| 0
| 0
| 0.051282
| 1
| 0.019231
| false
| 0
| 0.021368
| 0
| 0.042735
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a6194a2a790f3394fee53f4d76791e84363c659
| 2,245
|
py
|
Python
|
tests/test_integration_protocol.py
|
tstirrat15/pywinrm
|
124fe223eafa7bed3a1bf417c7f1ebc61d98850d
|
[
"MIT"
] | null | null | null |
tests/test_integration_protocol.py
|
tstirrat15/pywinrm
|
124fe223eafa7bed3a1bf417c7f1ebc61d98850d
|
[
"MIT"
] | 1
|
2016-03-10T21:01:51.000Z
|
2016-03-10T21:01:51.000Z
|
tests/test_integration_protocol.py
|
tstirrat15/pywinrm
|
124fe223eafa7bed3a1bf417c7f1ebc61d98850d
|
[
"MIT"
] | 2
|
2016-03-10T17:07:45.000Z
|
2022-02-13T20:50:28.000Z
|
from __future__ import unicode_literals
import re

import pytest

# Shorthand for marking not-yet-implemented protocol features as
# expected failures.
xfail = pytest.mark.xfail
def test_open_shell_and_close_shell(protocol_real):
    """Opening a shell returns a GUID-shaped shell id that can be closed."""
    shell_id = protocol_real.open_shell()
    # Raw string: '\w' in a plain literal is an invalid escape sequence
    # (DeprecationWarning on Python 3.6+, SyntaxError in the future).
    assert re.match(r'^\w{8}-\w{4}-\w{4}-\w{4}-\w{12}$', shell_id)
    protocol_real.close_shell(shell_id)
def test_run_command_with_arguments_and_cleanup_command(protocol_real):
    """run_command with an argument list returns a GUID command id."""
    shell_id = protocol_real.open_shell()
    command_id = protocol_real.run_command(shell_id, 'ipconfig', ['/all'])
    # Raw string avoids the invalid-escape DeprecationWarning for '\w'.
    assert re.match(r'^\w{8}-\w{4}-\w{4}-\w{4}-\w{12}$', command_id)
    protocol_real.cleanup_command(shell_id, command_id)
    protocol_real.close_shell(shell_id)
def test_run_command_without_arguments_and_cleanup_command(protocol_real):
    """run_command without arguments also returns a GUID command id."""
    shell_id = protocol_real.open_shell()
    command_id = protocol_real.run_command(shell_id, 'hostname')
    # Raw string avoids the invalid-escape DeprecationWarning for '\w'.
    assert re.match(r'^\w{8}-\w{4}-\w{4}-\w{4}-\w{12}$', command_id)
    protocol_real.cleanup_command(shell_id, command_id)
    protocol_real.close_shell(shell_id)
def test_get_command_output(protocol_real):
    """Running `ipconfig /all` yields its banner on stdout, nothing on
    stderr, and a zero exit status."""
    shell = protocol_real.open_shell()
    command = protocol_real.run_command(shell, 'ipconfig', ['/all'])
    stdout, stderr, rc = protocol_real.get_command_output(shell, command)

    assert rc == 0
    assert 'Windows IP Configuration' in stdout
    assert len(stderr) == 0

    protocol_real.cleanup_command(shell, command)
    protocol_real.close_shell(shell)
def test_run_command_taking_more_than_60_seconds(protocol_real):
    """Output retrieval survives a command outlasting the 60 s WinRM
    receive-timeout window (Start-Sleep for 75 s)."""
    shell_id = protocol_real.open_shell()
    command_id = protocol_real.run_command(
        shell_id, 'PowerShell -Command Start-Sleep -s 75')
    # Raw string avoids the invalid-escape DeprecationWarning for '\w'.
    assert re.match(r'^\w{8}-\w{4}-\w{4}-\w{4}-\w{12}$', command_id)
    std_out, std_err, status_code = protocol_real.get_command_output(
        shell_id, command_id)
    assert status_code == 0
    assert len(std_err) == 0
    protocol_real.cleanup_command(shell_id, command_id)
    protocol_real.close_shell(shell_id)
@xfail()
def test_set_timeout(protocol_real):
    """Configuring the operation timeout is not implemented yet."""
    raise NotImplementedError()
@xfail()
def test_set_max_env_size(protocol_real):
    """Configuring the maximum envelope size is not implemented yet."""
    raise NotImplementedError()
@xfail()
def test_set_locale(protocol_real):
    """Configuring the session locale is not implemented yet."""
    raise NotImplementedError()
| 29.933333
| 74
| 0.740312
| 341
| 2,245
| 4.472141
| 0.187683
| 0.220328
| 0.146885
| 0.137705
| 0.805246
| 0.805246
| 0.805246
| 0.805246
| 0.712131
| 0.712131
| 0
| 0.016546
| 0.13853
| 2,245
| 74
| 75
| 30.337838
| 0.771975
| 0
| 0
| 0.66
| 0
| 0.08
| 0.098441
| 0.057016
| 0
| 0
| 0
| 0
| 0.18
| 1
| 0.16
| false
| 0
| 0.06
| 0
| 0.22
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a76c068bf5d42b59e23a34e19ca1e56cb448be6
| 79,144
|
py
|
Python
|
dashboard/dashboard/models/alert_group_workflow_test.py
|
Countryboy13/catapult
|
abc7ba7d871fe3c25b0a1bec7fc84fb309034cb7
|
[
"BSD-3-Clause"
] | null | null | null |
dashboard/dashboard/models/alert_group_workflow_test.py
|
Countryboy13/catapult
|
abc7ba7d871fe3c25b0a1bec7fc84fb309034cb7
|
[
"BSD-3-Clause"
] | null | null | null |
dashboard/dashboard/models/alert_group_workflow_test.py
|
Countryboy13/catapult
|
abc7ba7d871fe3c25b0a1bec7fc84fb309034cb7
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=too-many-lines
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import datetime
import json
import uuid
from google.appengine.ext import ndb
from dashboard.common import namespaced_stored_object
from dashboard.common import testing_common
from dashboard.common import utils
from dashboard.models import alert_group
from dashboard.models import alert_group_workflow
from dashboard.models import anomaly
from dashboard.models import subscription
# Author used for automated issue comments; tests use it to tell
# automation-made updates apart from human ones.
_SERVICE_ACCOUNT_EMAIL = 'service-account@chromium.org'
class AlertGroupWorkflowTest(testing_common.TestCase):
def setUp(self):
    """Wire the workflow's external services to in-memory fakes."""
    super(AlertGroupWorkflowTest, self).setUp()
    self.maxDiff = None  # always show full assertion diffs
    self._issue_tracker = testing_common.FakeIssueTrackerService()
    self._sheriff_config = testing_common.FakeSheriffConfigClient()
    self._pinpoint = testing_common.FakePinpoint()
    self._crrev = testing_common.FakeCrrev()
    self._gitiles = testing_common.FakeGitiles()
    # Revision info knows one repository key and the 0..100 revision
    # range used by anomalies created via _AddAnomaly.
    self._revision_info = testing_common.FakeRevisionInfoClient(
        infos={
            'r_chromium_commit_pos': {
                'name':
                    'Chromium Commit Position',
                'url':
                    'http://test-results.appspot.com/revision_range?start={{R1}}&end={{R2}}',
            },
        },
        revisions={
            'master/bot/test_suite/measurement/test_case': {
                0: {
                    'r_chromium_commit_pos': '0'
                },
                100: {
                    'r_chromium_commit_pos': '100'
                },
            }
        })
    # Comments authored by this account count as automated.
    self._service_account = lambda: _SERVICE_ACCOUNT_EMAIL
@staticmethod
def _AddAnomaly(is_summary=False, **kwargs):
    """Create and persist an Anomaly entity, returning its ndb key.

    Defaults describe a regression on the canonical test path over
    revisions 1..100; keyword arguments override any field. Unless
    is_summary is set, the backing test row gets a story name.
    """
    params = {
        'test': 'master/bot/test_suite/measurement/test_case',
        'start_revision': 1,
        'end_revision': 100,
        'is_improvement': False,
        'median_before_anomaly': 1.1,
        'median_after_anomaly': 1.3,
        'ownership': {
            'component': 'Foo>Bar',
            'emails': ['x@google.com', 'y@google.com'],
            'info_blurb': 'This is an info blurb.',
        },
    }
    params.update(kwargs)
    path_parts = params['test'].split('/')

    def _Nest(parts):
        # Build the nested {part: {child: ...}} dict AddTests expects.
        return {parts[0]: _Nest(parts[1:])} if parts else {}

    testing_common.AddTests([path_parts[0]], [path_parts[1]],
                            _Nest(path_parts[2:]))
    test_key = utils.TestKey(params['test'])
    if not is_summary:
        test_row = test_key.get()
        test_row.unescaped_story_name = 'story'
        test_row.put()
    params['test'] = test_key
    return anomaly.Anomaly(**params).put()
@staticmethod
def _AddSignalQualityScore(anomaly_key, signal_score):
    """Persist a version-0 SignalQualityScore for the anomaly's test
    and return its ndb key."""
    version = 0
    score_key = ndb.Key(
        'SignalQuality',
        anomaly_key.get().test.string_id(),
        'SignalQualityScore',
        str(version),
    )
    entity = alert_group_workflow.SignalQualityScore(
        key=score_key,
        score=signal_score,
        updated_time=datetime.datetime.now(),
    )
    return entity.put()
@staticmethod
def _AddAlertGroup(anomaly_key,
                   subscription_name=None,
                   issue=None,
                   anomalies=None,
                   status=None,
                   project_id=None,
                   bisection_ids=None,
                   canonical_group=None):
    """Create and persist an AlertGroup derived from one anomaly.

    The group's name and revision range come from the anomaly; the
    optional arguments override the bug link, member anomalies, status,
    project and canonical group. Returns the new entity's ndb key.
    """
    anomaly_entity = anomaly_key.get()
    group = alert_group.AlertGroup(
        id=str(uuid.uuid4()),
        name=anomaly_entity.benchmark_name,
        subscription_name=subscription_name or 'sheriff',
        status=alert_group.AlertGroup.Status.untriaged,
        project_id=project_id or 'chromium',
        active=True,
        revision=alert_group.RevisionRange(
            repository='chromium',
            start=anomaly_entity.start_revision,
            end=anomaly_entity.end_revision,
        ),
        bisection_ids=bisection_ids or [],
    )
    if issue:
        # Link an existing monorail issue; the issue's project takes
        # precedence over the project_id argument.
        group.bug = alert_group.BugInfo(
            bug_id=issue.get('id'),
            project=issue.get('projectId', 'chromium'),
        )
        group.project_id = issue.get('projectId', 'chromium')
    if anomalies:
        group.anomalies = anomalies
    if status:
        group.status = status
    if canonical_group:
        group.canonical_group = canonical_group
    return group.put()
@staticmethod
def _UpdateTwice(workflow, update):
    """Process the same update twice; the second pass must be a no-op
    on monorail (updates only fire when the issue hasn't been touched).
    """
    for _ in range(2):
        workflow.Process(update=update)
def testAddAnomalies_GroupUntriaged(self):
    """Newly-seen anomalies are merged into an untriaged group."""
    anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    added = [self._AddAnomaly(), self._AddAnomaly()]
    group = self._AddAlertGroup(anomalies[0], anomalies=anomalies)
    self._sheriff_config.patterns = {
        '*': [subscription.Subscription(name='sheriff')],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
    )
    self._UpdateTwice(
        workflow=w,
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies + added),
            issue={},
        ))
    # The group now contains both the original and the added anomalies.
    self.assertEqual(len(group.get().anomalies), 4)
    for a in added:
        self.assertIn(a, group.get().anomalies)
def testAddAnomalies_GroupTriaged_IssueOpen(self):
    """Adding anomalies to a triaged group with an open issue posts a
    comment on the bug and refreshes its regression-count summary."""
    anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    added = [self._AddAnomaly(), self._AddAnomaly()]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        anomalies=anomalies,
        status=alert_group.AlertGroup.Status.triaged,
    )
    self._issue_tracker.issue.update({
        'state': 'open',
    })
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(name='sheriff', auto_triage_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
    )
    self._UpdateTwice(
        workflow=w,
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies + added),
            issue=self._issue_tracker.issue,
        ))
    self.assertEqual(len(group.get().anomalies), 4)
    # Group stays triaged; the open issue received a (non-emailing)
    # comment with an updated summary.
    self.assertEqual(group.get().status, alert_group.AlertGroup.Status.triaged)
    for a in added:
        self.assertIn(a, group.get().anomalies)
    self.assertEqual(group.get().bug.bug_id,
                     self._issue_tracker.add_comment_args[0])
    self.assertIn('Added 2 regressions to the group',
                  self._issue_tracker.add_comment_args[1])
    self.assertIn('4 regressions in test_suite',
                  self._issue_tracker.add_comment_kwargs['summary'])
    self.assertIn('sheriff',
                  self._issue_tracker.add_comment_kwargs['summary'])
    self.assertFalse(self._issue_tracker.add_comment_kwargs['send_email'])
def testAddAnomalies_GroupTriaged_IssueClosed(self):
    """Without auto-bisect, adding anomalies to a group whose issue was
    closed (WontFix by the service account) comments but keeps it closed."""
    anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    added = [self._AddAnomaly(), self._AddAnomaly()]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        anomalies=anomalies,
        status=alert_group.AlertGroup.Status.closed,
    )
    # The issue was closed WontFix by the automation account.
    self._issue_tracker.issue.update({
        'state':
            'closed',
        'comments': [{
            'id': 1,
            'author': _SERVICE_ACCOUNT_EMAIL,
            'updates': {
                'status': 'WontFix'
            },
        }],
    })
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(name='sheriff', auto_triage_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        service_account=self._service_account,
    )
    self._UpdateTwice(
        workflow=w,
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies + added),
            issue=self._issue_tracker.issue,
        ))
    self.assertEqual(len(group.get().anomalies), 4)
    # Issue stays closed because auto-bisect is not enabled.
    self.assertEqual('closed', self._issue_tracker.issue.get('state'))
    for a in added:
        self.assertIn(a, group.get().anomalies)
    self.assertEqual(group.get().bug.bug_id,
                     self._issue_tracker.add_comment_args[0])
    self.assertIn('Added 2 regressions to the group',
                  self._issue_tracker.add_comment_args[1])
    self.assertIn('4 regressions in test_suite',
                  self._issue_tracker.add_comment_kwargs['summary'])
    self.assertIn('sheriff',
                  self._issue_tracker.add_comment_kwargs['summary'])
    self.assertFalse(self._issue_tracker.add_comment_kwargs['send_email'])
def testAddAnomalies_GroupTriaged_IssueClosed_AutoBisect(self):
    """With auto-bisect enabled, new anomalies reopen an issue that the
    service account had closed as WontFix."""
    anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    added = [self._AddAnomaly(), self._AddAnomaly()]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        anomalies=anomalies,
        status=alert_group.AlertGroup.Status.closed,
    )
    # Closed WontFix by the automation account (not a human).
    self._issue_tracker.issue.update({
        'state':
            'closed',
        'comments': [{
            'id': 1,
            'author': _SERVICE_ACCOUNT_EMAIL,
            'updates': {
                'status': 'WontFix'
            },
        }],
    })
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(
                name='sheriff',
                auto_triage_enable=True,
                auto_bisect_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        service_account=self._service_account,
    )
    w.Process(
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies + added),
            issue=self._issue_tracker.issue,
        ))
    self.assertEqual(len(group.get().anomalies), 4)
    # The automation-closed issue is reopened for bisection.
    self.assertEqual('open', self._issue_tracker.issue.get('state'))
    for a in added:
        self.assertIn(a, group.get().anomalies)
    self.assertEqual(group.get().bug.bug_id,
                     self._issue_tracker.add_comment_args[0])
    self.assertIn('Added 2 regressions to the group',
                  self._issue_tracker.add_comment_args[1])
    self.assertFalse(self._issue_tracker.add_comment_kwargs['send_email'])
def testUpdate_GroupTriaged_IssueClosed(self):
    """A triaged group whose issue was closed transitions to closed."""
    anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        status=alert_group.AlertGroup.Status.triaged,
    )
    self._issue_tracker.issue.update({
        'state':
            'closed',
        'comments': [{
            'id': 1,
            'author': _SERVICE_ACCOUNT_EMAIL,
            'updates': {
                'status': 'WontFix'
            },
        }],
    })
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(name='sheriff', auto_triage_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        service_account=self._service_account,
    )
    self._UpdateTwice(
        workflow=w,
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies),
            issue=self._issue_tracker.issue,
        ))
    # The group state follows the issue state.
    self.assertEqual(group.get().status, alert_group.AlertGroup.Status.closed)
def testAddAnomalies_GroupTriaged_IssueClosed_Manual(self):
    """An issue most recently closed by a human is NOT reopened, even
    with auto-bisect enabled."""
    anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    added = [self._AddAnomaly(), self._AddAnomaly()]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        anomalies=anomalies,
        status=alert_group.AlertGroup.Status.closed,
    )
    # Latest WontFix (id 2) came from a human sheriff; the earlier one
    # (id 1) came from the automation account.
    self._issue_tracker.issue.update({
        'state':
            'closed',
        'comments': [{
            'id': 2,
            'author': "sheriff@chromium.org",
            'updates': {
                'status': 'WontFix'
            },
        }, {
            'id': 1,
            'author': _SERVICE_ACCOUNT_EMAIL,
            'updates': {
                'status': 'WontFix'
            },
        }],
    })
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(
                name='sheriff',
                auto_triage_enable=True,
                auto_bisect_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        service_account=self._service_account,
    )
    self._UpdateTwice(
        workflow=w,
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies + added),
            issue=self._issue_tracker.issue,
        ))
    self.assertEqual(len(group.get().anomalies), 4)
    # Human decision wins: the issue stays closed.
    self.assertEqual('closed', self._issue_tracker.issue.get('state'))
    for a in added:
        self.assertIn(a, group.get().anomalies)
    self.assertEqual(group.get().bug.bug_id,
                     self._issue_tracker.add_comment_args[0])
    self.assertIn('Added 2 regressions to the group',
                  self._issue_tracker.add_comment_args[1])
    self.assertFalse(self._issue_tracker.add_comment_kwargs['send_email'])
def testUpdate_GroupTriaged_IssueClosed_AllTriaged(self):
    """When every anomaly has recovered and the issue is closed, the
    group closes silently (no new comment is posted)."""
    anomalies = [
        self._AddAnomaly(recovered=True),
        self._AddAnomaly(recovered=True)
    ]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        anomalies=anomalies,
        status=alert_group.AlertGroup.Status.triaged,
    )
    self._issue_tracker.issue.update({
        'state':
            'closed',
        'comments': [{
            'id': 1,
            'author': _SERVICE_ACCOUNT_EMAIL,
            'updates': {
                'status': 'WontFix'
            },
        }],
    })
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(name='sheriff', auto_triage_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        service_account=self._service_account,
    )
    self._UpdateTwice(
        workflow=w,
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies),
            issue=self._issue_tracker.issue,
        ))
    self.assertEqual(group.get().status, alert_group.AlertGroup.Status.closed)
    # No comment was added on the way out.
    self.assertIsNone(self._issue_tracker.add_comment_args)
def testAddAnomalies_GroupTriaged_CommentsNone(self):
    """A closed issue whose comment list is None (not empty) is handled
    gracefully: anomalies are added and the issue stays closed."""
    anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    added = [self._AddAnomaly(), self._AddAnomaly()]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        anomalies=anomalies,
        status=alert_group.AlertGroup.Status.closed,
    )
    # 'comments': None exercises the missing-comments edge case.
    self._issue_tracker.issue.update({
        'state': 'closed',
        'comments': None,
    })
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(
                name='sheriff',
                auto_triage_enable=True,
                auto_bisect_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        service_account=self._service_account,
    )
    self._UpdateTwice(
        workflow=w,
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies + added),
            issue=self._issue_tracker.issue,
        ))
    self.assertEqual(len(group.get().anomalies), 4)
    self.assertEqual('closed', self._issue_tracker.issue.get('state'))
    for a in added:
        self.assertIn(a, group.get().anomalies)
    self.assertEqual(group.get().bug.bug_id,
                     self._issue_tracker.add_comment_args[0])
    self.assertIn('Added 2 regressions to the group',
                  self._issue_tracker.add_comment_args[1])
    self.assertFalse(self._issue_tracker.add_comment_kwargs['send_email'])
def testUpdate_GroupClosed_IssueOpen(self):
    """A closed group whose issue was reopened moves back to triaged."""
    anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        status=alert_group.AlertGroup.Status.closed,
    )
    self._issue_tracker.issue.update({
        'state': 'open',
    })
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(name='sheriff', auto_triage_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
    )
    self._UpdateTwice(
        workflow=w,
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies),
            issue=self._issue_tracker.issue,
        ))
    self.assertEqual(group.get().status, alert_group.AlertGroup.Status.triaged)
def testUpdate_GroupTriaged_AlertsAllRecovered(self):
    """When every anomaly in a triaged group recovers, the open issue
    is closed."""
    anomalies = [
        self._AddAnomaly(recovered=True),
        self._AddAnomaly(recovered=True),
    ]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        status=alert_group.AlertGroup.Status.triaged,
    )
    self._issue_tracker.issue.update({
        'state': 'open',
    })
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(name='sheriff', auto_triage_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
    )
    self._UpdateTwice(
        workflow=w,
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies),
            issue=self._issue_tracker.issue,
        ))
    self.assertEqual('closed', self._issue_tracker.issue.get('state'))
def testUpdate_GroupTriaged_AlertsPartRecovered(self):
    """If only some anomalies recovered, the issue remains open."""
    anomalies = [self._AddAnomaly(recovered=True), self._AddAnomaly()]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        status=alert_group.AlertGroup.Status.triaged,
    )
    self._issue_tracker.issue.update({
        'state': 'open',
    })
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(name='sheriff', auto_triage_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
    )
    self._UpdateTwice(
        workflow=w,
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies),
            issue=self._issue_tracker.issue,
        ))
    self.assertEqual('open', self._issue_tracker.issue.get('state'))
def testTriage_GroupUntriaged(self):
    """An untriaged group with auto-triage files a new bug whose body
    includes the regression count and the revision-info link."""
    anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    group = self._AddAlertGroup(
        anomalies[0],
        status=alert_group.AlertGroup.Status.untriaged,
    )
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(name='sheriff', auto_triage_enable=True)
        ],
    }
    # triage_delay=0 makes the group eligible for triage immediately.
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        revision_info=self._revision_info,
        config=alert_group_workflow.AlertGroupWorkflow.Config(
            active_window=datetime.timedelta(days=7),
            triage_delay=datetime.timedelta(hours=0),
        ),
    )
    self._UpdateTwice(
        workflow=w,
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies),
            issue=None,
        ))
    self.assertIn('2 regressions', self._issue_tracker.new_bug_args[0])
    self.assertIn(
        'Chromium Commit Position: http://test-results.appspot.com/revision_range?start=0&end=100',
        self._issue_tracker.new_bug_args[1])
def testTriage_GroupUntriaged_MultiSubscriptions(self):
  """No bug is filed when any matching subscription lacks auto-triage."""
  anomalies = [self._AddAnomaly(), self._AddAnomaly()]
  group = self._AddAlertGroup(
      anomalies[0],
      status=alert_group.AlertGroup.Status.untriaged,
  )
  # First subscription has auto_triage disabled (default), so the group
  # must not be auto-triaged even though the second enables it.
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(name='sheriff'),
          subscription.Subscription(
              name='sheriff_not_bind', auto_triage_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      revision_info=self._revision_info,
      config=alert_group_workflow.AlertGroupWorkflow.Config(
          active_window=datetime.timedelta(days=7),
          triage_delay=datetime.timedelta(hours=0),
      ),
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=None,
      ))
  self.assertIsNone(self._issue_tracker.new_bug_args)
def testTriage_GroupUntriaged_NonChromiumProject(self):
  """Triage honours a non-default monorail project ('v8') end to end."""
  anomalies = [self._AddAnomaly()]
  # TODO(dberris): Figure out a way to not have to hack the fake service to
  # seed it with the correct issue in the correct project.
  # Move the fake tracker's seeded issue from 'chromium' to 'v8'.
  self._issue_tracker.issues[(
      'v8', self._issue_tracker.bug_id)] = self._issue_tracker.issues[(
          'chromium', self._issue_tracker.bug_id)]
  del self._issue_tracker.issues[('chromium', self._issue_tracker.bug_id)]
  self._issue_tracker.issues[('v8', self._issue_tracker.bug_id)].update({
      'projectId': 'v8',
  })
  group = self._AddAlertGroup(
      anomalies[0],
      status=alert_group.AlertGroup.Status.untriaged,
      project_id='v8')
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              monorail_project_id='v8')
      ],
  }
  self.assertEqual(group.get().project_id, 'v8')
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      revision_info=self._revision_info,
      config=alert_group_workflow.AlertGroupWorkflow.Config(
          active_window=datetime.timedelta(days=7),
          triage_delay=datetime.timedelta(hours=0),
      ))
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=None))
  # Both the group's bug and the anomaly should be associated with 'v8'.
  self.assertEqual(group.get().bug.project, 'v8')
  self.assertEqual(anomalies[0].get().project_id, 'v8')
def testTriage_GroupUntriaged_MultipleRange(self):
  """Triage reports a combined revision range over anomalies with
  different start revisions."""
  anomalies = [
      self._AddAnomaly(median_before_anomaly=0.2, start_revision=10),
      self._AddAnomaly(median_before_anomaly=0.1)
  ]
  group = self._AddAlertGroup(
      anomalies[0],
      status=alert_group.AlertGroup.Status.untriaged,
  )
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(name='sheriff', auto_triage_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      revision_info=self._revision_info,
      config=alert_group_workflow.AlertGroupWorkflow.Config(
          active_window=datetime.timedelta(days=7),
          triage_delay=datetime.timedelta(hours=0),
      ),
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=None,
      ))
  self.assertIn('2 regressions', self._issue_tracker.new_bug_args[0])
  self.assertIn(
      'Chromium Commit Position: http://test-results.appspot.com/revision_range?start=0&end=100',
      self._issue_tracker.new_bug_args[1])
def testTriage_GroupUntriaged_InfAnomaly(self):
  """A zero median-before anomaly is reported as an 'inf' change in the
  new bug description."""
  # median_before_anomaly=0 makes the relative delta infinite.
  anomalies = [self._AddAnomaly(median_before_anomaly=0), self._AddAnomaly()]
  group = self._AddAlertGroup(
      anomalies[0],
      status=alert_group.AlertGroup.Status.untriaged,
  )
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(name='sheriff', auto_triage_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      revision_info=self._revision_info,
      config=alert_group_workflow.AlertGroupWorkflow.Config(
          active_window=datetime.timedelta(days=7),
          triage_delay=datetime.timedelta(hours=0),
      ),
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=None,
      ))
  self.assertIn('inf', self._issue_tracker.new_bug_args[1])
def testTriage_GroupTriaged_InfAnomaly(self):
  """Updating an already-triaged group with an 'inf' anomaly adds a quiet
  comment (no email) mentioning 'inf'."""
  anomalies = [self._AddAnomaly(median_before_anomaly=0), self._AddAnomaly()]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(name='sheriff', auto_triage_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertIn('inf', self._issue_tracker.add_comment_args[1])
  self.assertFalse(self._issue_tracker.add_comment_kwargs['send_email'])
def testArchive_GroupUntriaged(self):
  """An untriaged group past its active window is archived (active=False)."""
  anomalies = [self._AddAnomaly(), self._AddAnomaly()]
  group = self._AddAlertGroup(
      anomalies[0],
      anomalies=anomalies,
      status=alert_group.AlertGroup.Status.untriaged,
  )
  self._sheriff_config.patterns = {
      '*': [subscription.Subscription(name='sheriff')],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      config=alert_group_workflow.AlertGroupWorkflow.Config(
          # Zero-day window: the group is immediately out of its window.
          active_window=datetime.timedelta(days=0),
          triage_delay=datetime.timedelta(hours=0),
      ),
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          # Nudge 'now' past the (empty) active window.
          now=datetime.datetime.utcnow() + datetime.timedelta(seconds=1),
          anomalies=ndb.get_multi(anomalies),
          issue=None,
      ))
  self.assertEqual(False, group.get().active)
def testArchive_GroupTriaged(self):
  """A triaged group past its active window is archived even with an open
  issue attached."""
  anomalies = [self._AddAnomaly(), self._AddAnomaly()]
  group = self._AddAlertGroup(
      anomalies[0],
      anomalies=anomalies,
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(name='sheriff', auto_triage_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      config=alert_group_workflow.AlertGroupWorkflow.Config(
          active_window=datetime.timedelta(days=0),
          triage_delay=datetime.timedelta(hours=0),
      ),
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertEqual(False, group.get().active)
def testBisect_GroupTriaged(self):
  """Auto-bisect starts a pinpoint job on the most-regressed anomaly and
  labels the issue as bisected."""
  anomalies = [
      self._AddAnomaly(median_before_anomaly=0.2),
      # Smaller median-before means a larger relative regression; this one
      # is expected to be picked for bisection.
      self._AddAnomaly(median_before_anomaly=0.1),
  ]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  tags = json.loads(self._pinpoint.new_job_request['tags'])
  self.assertEqual(anomalies[1].urlsafe(), tags['alert'])
  # Tags must be a dict of key/value string pairs.
  # NOTE(review): `basestring` is Python 2-only — confirm this file still
  # targets py2 (or is run through six/compat shims).
  for k, v in tags.items():
    self.assertIsInstance(k, basestring)
    self.assertIsInstance(v, basestring)
  self.assertEqual(['123456'], group.get().bisection_ids)
  self.assertEqual(['Chromeperf-Auto-Bisected'],
                   self._issue_tracker.add_comment_kwargs['labels'])
def testBisect_GroupTriaged_WithSummary(self):
  """Summary (is_summary=True) anomalies are skipped when choosing the
  bisection target, even if more regressed."""
  anomalies = [
      self._AddAnomaly(
          test='master/bot1/test_suite/measurement/test_case1',
          median_before_anomaly=0.2,
      ),
      # More regressed, but a summary metric — must not be bisected.
      self._AddAnomaly(
          test='master/bot1/test_suite/measurement/test_case2',
          median_before_anomaly=0.1,
          is_summary=True,
      ),
  ]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  tags = json.loads(self._pinpoint.new_job_request['tags'])
  self.assertEqual(anomalies[0].urlsafe(), tags['alert'])
  # Tags must be a dict of key/value string pairs.
  for k, v in tags.items():
    self.assertIsInstance(k, basestring)
    self.assertIsInstance(v, basestring)
  self.assertEqual(['123456'], group.get().bisection_ids)
  self.assertEqual(['Chromeperf-Auto-Bisected'],
                   self._issue_tracker.add_comment_kwargs['labels'])
def testBisect_GroupTriaged_WithSignalQuality(self):
  """The anomaly with the higher signal-quality score is bisected, even if
  it is less regressed."""
  anomalies = [
      self._AddAnomaly(
          test='master/bot/test_suite/measurement/test_case1',
          median_before_anomaly=0.2,
      ),
      self._AddAnomaly(
          test='master/bot/test_suite/measurement/test_case2',
          median_before_anomaly=0.1,
      ),
  ]
  # anomalies[0] has the better score (0.9 > 0.8).
  self._AddSignalQualityScore(anomalies[0], 0.9)
  self._AddSignalQualityScore(anomalies[1], 0.8)
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  tags = json.loads(self._pinpoint.new_job_request['tags'])
  self.assertEqual(anomalies[0].urlsafe(), tags['alert'])
def testBisect_GroupTriaged_WithDefaultSignalQuality(self):
  """An anomaly with no stored signal-quality score is preferred over ones
  with low explicit scores."""
  anomalies = [
      self._AddAnomaly(
          test='master/bot/test_suite/measurement/test_case1',
          median_before_anomaly=0.1,
      ),
      self._AddAnomaly(
          test='master/bot/test_suite/measurement/test_case2',
          median_before_anomaly=0.2,
      ),
      # No signal-quality score seeded for this one — it gets the default.
      self._AddAnomaly(
          test='master/bot/test_suite/measurement/test_case3',
          median_before_anomaly=0.3,
      ),
  ]
  self._AddSignalQualityScore(anomalies[0], 0.3)
  self._AddSignalQualityScore(anomalies[1], 0.2)
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  tags = json.loads(self._pinpoint.new_job_request['tags'])
  self.assertEqual(anomalies[2].urlsafe(), tags['alert'])
  # Tags must be a dict of key/value string pairs.
  for k, v in tags.items():
    self.assertIsInstance(k, basestring)
    self.assertIsInstance(v, basestring)
  self.assertEqual(['123456'], group.get().bisection_ids)
  self.assertEqual(['Chromeperf-Auto-Bisected'],
                   self._issue_tracker.add_comment_kwargs['labels'])
def testBisect_GroupTriaged_MultiSubscriptions(self):
  """No bisect is started when any matching subscription lacks
  auto-bisect."""
  anomalies = [
      self._AddAnomaly(median_before_anomaly=0.2),
      self._AddAnomaly(median_before_anomaly=0.1),
  ]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  # The first subscription has no auto_bisect/auto_triage flags set.
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(name='sheriff'),
          subscription.Subscription(
              name='sheriff_not_bind',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertIsNone(self._pinpoint.new_job_request)
def testBisect_GroupBisected(self):
  """A group already in 'bisected' status does not start another
  pinpoint job."""
  anomalies = [self._AddAnomaly(), self._AddAnomaly()]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.bisected,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertIsNone(self._pinpoint.new_job_request)
def testBisect_GroupTriaged_NoRecovered(self):
  """Recovered anomalies are excluded from bisection target selection."""
  anomalies = [
      # Recovered — must not be chosen despite the larger regression.
      self._AddAnomaly(
          median_before_anomaly=0.1, median_after_anomaly=1.0,
          recovered=True),
      self._AddAnomaly(median_before_anomaly=0.2, median_after_anomaly=1.0),
  ]
  group = self._AddAlertGroup(
      anomalies[1],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
      anomalies=anomalies,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertIsNotNone(self._pinpoint.new_job_request)
  self.assertEqual(group.get().status, alert_group.AlertGroup.Status.bisected)
  # Check that we bisected the anomaly that is not recovered.
  recovered_anomaly = anomalies[0].get()
  bisected_anomaly = anomalies[1].get()
  self.assertNotEqual(recovered_anomaly.pinpoint_bisects, ['123456'])
  self.assertEqual(bisected_anomaly.pinpoint_bisects, ['123456'])
def testBisect_GroupTriaged_NoIgnored(self):
  """Manually-ignored anomalies (bug_id=-2) are excluded from bisection
  target selection."""
  anomalies = [
      # This anomaly is manually ignored.
      self._AddAnomaly(
          median_before_anomaly=0.1, median_after_anomaly=1.0, bug_id=-2),
      self._AddAnomaly(
          median_before_anomaly=0.2,
          median_after_anomaly=1.0,
          start_revision=20),
  ]
  group = self._AddAlertGroup(
      anomalies[1],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
      anomalies=anomalies,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertIsNotNone(self._pinpoint.new_job_request)
  self.assertEqual(self._pinpoint.new_job_request['bug_id'], 12345)
  self.assertEqual(group.get().status, alert_group.AlertGroup.Status.bisected)
  # Check that we bisected the anomaly that is not ignored.
  ignored_anomaly = anomalies[0].get()
  bisected_anomaly = anomalies[1].get()
  self.assertNotEqual(ignored_anomaly.pinpoint_bisects, ['123456'])
  self.assertEqual(bisected_anomaly.pinpoint_bisects, ['123456'])
def testBisect_GroupTriaged_AlertWithBug(self):
  """The pinpoint job is filed against the group's own bug/project, not an
  anomaly's pre-existing foreign bug."""
  anomalies = [
      self._AddAnomaly(median_before_anomaly=0.2),
      # Already associated with a different bug in another project.
      self._AddAnomaly(
          median_before_anomaly=0.1,
          bug_id=12340,
          project_id='v8',
      ),
  ]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertEqual(self._issue_tracker.bug_id,
                   self._pinpoint.new_job_request['bug_id'])
  self.assertEqual('chromium', self._pinpoint.new_job_request['project'])
  self.assertEqual(['123456'], group.get().bisection_ids)
def testBisect_GroupTriaged_MultiBot(self):
  """With anomalies spread across bots, the chosen bisection target is the
  most-regressed anomaly on the dominant bot (bot1)."""
  anomalies = [
      self._AddAnomaly(
          test='master/bot1/test_suite/measurement/test_case1',
          median_before_anomaly=0.3,
      ),
      self._AddAnomaly(
          test='master/bot1/test_suite/measurement/test_case2',
          median_before_anomaly=0.2,
      ),
      self._AddAnomaly(
          test='master/bot2/test_suite/measurement/test_case2',
          median_before_anomaly=0.1,
      ),
  ]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertEqual(
      anomalies[1].urlsafe(),
      json.loads(self._pinpoint.new_job_request['tags'])['alert'])
  self.assertEqual(['123456'], group.get().bisection_ids)
def testBisect_GroupTriaged_MultiBot_PartInf(self):
  """An 'inf' regression (median_before=0) on the dominant bot does not
  displace the finite most-regressed anomaly as bisection target."""
  anomalies = [
      # Infinite relative change.
      self._AddAnomaly(
          test='master/bot1/test_suite/measurement/test_case1',
          median_before_anomaly=0.0,
      ),
      self._AddAnomaly(
          test='master/bot1/test_suite/measurement/test_case2',
          median_before_anomaly=0.2,
      ),
      self._AddAnomaly(
          test='master/bot2/test_suite/measurement/test_case2',
          median_before_anomaly=0.1,
      ),
  ]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertEqual(
      anomalies[1].urlsafe(),
      json.loads(self._pinpoint.new_job_request['tags'])['alert'])
  self.assertEqual(['123456'], group.get().bisection_ids)
def testBisect_GroupTriaged_MultiBot_AllInf(self):
  """When every anomaly on the dominant bot is 'inf', the one with the
  larger absolute after-median is chosen."""
  anomalies = [
      self._AddAnomaly(
          test='master/bot1/test_suite/measurement/test_case1',
          median_before_anomaly=0.0,
          median_after_anomaly=1.0,
      ),
      # Also inf, but with the larger median_after — expected pick.
      self._AddAnomaly(
          test='master/bot1/test_suite/measurement/test_case2',
          median_before_anomaly=0.0,
          median_after_anomaly=2.0,
      ),
      self._AddAnomaly(
          test='master/bot2/test_suite/measurement/test_case2',
          median_before_anomaly=0.1,
      ),
  ]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertEqual(
      anomalies[1].urlsafe(),
      json.loads(self._pinpoint.new_job_request['tags'])['alert'])
  self.assertEqual(['123456'], group.get().bisection_ids)
def testBisect_GroupTriaged_AlertBisected(self):
  """Anomalies whose bisect id is already in the group's bisection_ids are
  skipped; the new job id is appended to the existing list."""
  anomalies = [
      self._AddAnomaly(
          test='master/bot1/test_suite/measurement/test_case1',
          pinpoint_bisects=['abcdefg'],
          median_before_anomaly=0.2,
      ),
      # Bisect 'abcdef' is already recorded on the group below, so this
      # (more regressed) anomaly is not re-bisected.
      self._AddAnomaly(
          test='master/bot1/test_suite/measurement/test_case2',
          pinpoint_bisects=['abcdef'],
          median_before_anomaly=0.1,
      ),
  ]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
      bisection_ids=['abcdef'],
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertEqual(
      anomalies[0].urlsafe(),
      json.loads(self._pinpoint.new_job_request['tags'])['alert'])
  self.assertItemsEqual(['abcdef', '123456'], group.get().bisection_ids)
def testBisect_GroupTriaged_CrrevFailed(self):
  """A crrev lookup failure marks the group bisected with no job and
  flags the issue for attention."""
  anomalies = [self._AddAnomaly(), self._AddAnomaly()]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  # Force the fake crrev service to fail revision resolution.
  self._crrev.SetFailure()
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertEqual(alert_group.AlertGroup.Status.bisected, group.get().status)
  self.assertEqual([], group.get().bisection_ids)
  self.assertEqual(['Chromeperf-Auto-NeedsAttention'],
                   self._issue_tracker.add_comment_kwargs['labels'])
def testBisect_GroupTriaged_PinpointFailed(self):
  """A pinpoint job-creation failure marks the group bisected with no job
  and flags the issue for attention."""
  anomalies = [self._AddAnomaly(), self._AddAnomaly()]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state': 'open',
  })
  # Force the fake pinpoint service to reject new jobs.
  self._pinpoint.SetFailure()
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertEqual(alert_group.AlertGroup.Status.bisected, group.get().status)
  self.assertEqual([], group.get().bisection_ids)
  self.assertEqual(['Chromeperf-Auto-NeedsAttention'],
                   self._issue_tracker.add_comment_kwargs['labels'])
def testBisect_SingleCL(self):
  """A single-commit regression range skips bisection and instead assigns
  the issue to the commit author."""
  anomalies = [
      self._AddAnomaly(
          # Current implementation requires that a revision string is between
          # 5 and 7 digits long.
          start_revision=11111,
          end_revision=11111,
          test='ChromiumPerf/some-bot/some-benchmark/some-metric/some-story')
  ]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged)
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ]
  }
  # Here we are simulating that a gitiles service will respond to a specific
  # repository URL (the format is not important) and can map a commit (40
  # hexadecimal characters) to some commit information.
  self._gitiles._repo_commit_list.update({
      'git://chromium': {
          'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa': {
              'author': {
                  'email': 'author@chromium.org',
              },
              'message': 'This is some commit.\n\nWith some details.',
          }
      }
  })
  # We are also seeding some repository information to let us set which
  # repository URL is being used to look up data from a gitiles service.
  namespaced_stored_object.Set('repositories', {
      'chromium': {
          'repository_url': 'git://chromium'
      },
  })
  # Current implementation requires that a git hash is 40 characters of
  # hexadecimal digits.
  self._crrev.SetSuccess('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
      gitiles=self._gitiles)
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue))
  # No pinpoint job: group is bisected with an empty bisection_ids list,
  # and the issue is assigned instead.
  self.assertEqual(alert_group.AlertGroup.Status.bisected, group.get().status)
  self.assertEqual([], group.get().bisection_ids)
  self.assertEqual(['Chromeperf-Auto-Assigned'],
                   self._issue_tracker.add_comment_kwargs['labels'])
  self.assertIn(('Assigning to author@chromium.org because this is the '
                 'only CL in range:'),
                self._issue_tracker.add_comment_args[1])
def testBisect_ExplicitOptOut(self):
  """The Chromeperf-Auto-BisectOptOut issue label suppresses auto-bisect
  even when the subscription enables it."""
  anomalies = [self._AddAnomaly(), self._AddAnomaly()]
  group = self._AddAlertGroup(
      anomalies[0],
      issue=self._issue_tracker.issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  self._issue_tracker.issue.update({
      'state':
          'open',
      'labels':
          self._issue_tracker.issue.get('labels') +
          ['Chromeperf-Auto-BisectOptOut']
  })
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff',
              auto_triage_enable=True,
              auto_bisect_enable=True)
      ],
  }
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      pinpoint=self._pinpoint,
      crrev=self._crrev,
  )
  # Sanity-check the opt-out label really landed on the issue.
  self.assertIn('Chromeperf-Auto-BisectOptOut',
                self._issue_tracker.issue.get('labels'))
  self._UpdateTwice(
      workflow=w,
      update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
          now=datetime.datetime.utcnow(),
          anomalies=ndb.get_multi(anomalies),
          issue=self._issue_tracker.issue,
      ))
  self.assertIsNone(self._pinpoint.new_job_request)
def testAutoMerge_SucessfulMerge(self):
  """A duplicate group with auto-merge enabled is merged into its canonical
  group: anomalies move to the canonical bug and the group closes.

  NOTE(review): method name typo ('Sucessful') — left as-is since renaming
  changes the public test identifier.
  """
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(
              name='sheriff', auto_triage_enable=True, auto_merge_enable=True)
      ],
  }
  # Bug ids are 42 (duplicate) and 43 (canonical).
  self._issue_tracker._bug_id_counter = 42
  duplicate_issue = self._issue_tracker.GetIssue(
      self._issue_tracker.NewBug(status='Duplicate',
                                 state='closed')['bug_id'])
  canonical_issue = self._issue_tracker.GetIssue(
      self._issue_tracker.NewBug()['bug_id'])
  grouped_anomalies = [self._AddAnomaly(), self._AddAnomaly()]
  all_anomalies = grouped_anomalies + [self._AddAnomaly()]
  group = self._AddAlertGroup(
      grouped_anomalies[0],
      issue=duplicate_issue,
      anomalies=grouped_anomalies,
      status=alert_group.AlertGroup.Status.triaged,
  )
  canonical_group = self._AddAlertGroup(
      grouped_anomalies[0],
      issue=canonical_issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      config=alert_group_workflow.AlertGroupWorkflow.Config(
          active_window=datetime.timedelta(days=7),
          triage_delay=datetime.timedelta(hours=0),
      ),
  )
  u = alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
      now=datetime.datetime.utcnow(),
      anomalies=ndb.get_multi(all_anomalies),
      issue=duplicate_issue,
      canonical_group=canonical_group.get(),
  )
  w.Process(update=u)
  # First two are NewBug calls in the test itself.
  self.assertEqual(len(self._issue_tracker.calls), 4)
  self.assertEqual(self._issue_tracker.calls[2]['method'], 'AddBugComment')
  self.assertEqual(len(self._issue_tracker.calls[2]['args']), 2)
  self.assertEqual(self._issue_tracker.calls[2]['args'][0], 42)
  self.assertIn(
      '(%s) was automatically merged into %s' %
      (group.string_id(), canonical_group.string_id()),
      self._issue_tracker.calls[2]['args'][1])
  self.assertEqual(self._issue_tracker.calls[2]['kwargs'], {
      'project': 'chromium',
      'send_email': False
  })
  self.assertEqual(
      self._issue_tracker.calls[3], {
          'method': 'AddBugComment',
          'args': (42, None),
          'kwargs': {
              'summary':
                  '[%s]: %d regressions in %s' % ('sheriff', 3, 'test_suite'),
              'labels': [
                  'Type-Bug-Regression', 'Chromeperf-Auto-Triaged',
                  'Restrict-View-Google', 'Pri-2'
              ],
              'cc_list': [],
              'components': ['Foo>Bar'],
              'project':
                  'chromium',
              'send_email':
                  False
          },
      })
  # All anomalies now point at the canonical bug (43).
  self.assertTrue(all(a.get().bug_id == 43 for a in all_anomalies))
  self.assertEqual(group.get().canonical_group, canonical_group)
  self.assertEqual(group.get().status, alert_group.AlertGroup.Status.closed)
def testAutoMerge_AutoMergeNotOptIn(self):
  """Without auto_merge_enable, a duplicate group is closed but its
  anomalies stay on the duplicate bug — no merge comment is posted."""
  self._sheriff_config.patterns = {
      '*': [
          subscription.Subscription(name='sheriff', auto_triage_enable=True)
      ],
  }
  self._issue_tracker._bug_id_counter = 42
  duplicate_issue = self._issue_tracker.GetIssue(
      self._issue_tracker.NewBug(status='Duplicate',
                                 state='closed')['bug_id'])
  canonical_issue = self._issue_tracker.GetIssue(
      self._issue_tracker.NewBug()['bug_id'])
  grouped_anomalies = [self._AddAnomaly(), self._AddAnomaly()]
  all_anomalies = grouped_anomalies + [self._AddAnomaly()]
  group = self._AddAlertGroup(
      grouped_anomalies[0],
      issue=duplicate_issue,
      anomalies=grouped_anomalies,
      status=alert_group.AlertGroup.Status.triaged,
  )
  canonical_group = self._AddAlertGroup(
      grouped_anomalies[0],
      issue=canonical_issue,
      status=alert_group.AlertGroup.Status.triaged,
  )
  w = alert_group_workflow.AlertGroupWorkflow(
      group.get(),
      sheriff_config=self._sheriff_config,
      issue_tracker=self._issue_tracker,
      config=alert_group_workflow.AlertGroupWorkflow.Config(
          active_window=datetime.timedelta(days=7),
          triage_delay=datetime.timedelta(hours=0),
      ),
  )
  u = alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
      now=datetime.datetime.utcnow(),
      anomalies=ndb.get_multi(all_anomalies),
      issue=duplicate_issue,
      canonical_group=canonical_group.get(),
  )
  self._UpdateTwice(workflow=w, update=u)
  # First two are NewBug calls in the test itself.
  self.assertEqual(len(self._issue_tracker.calls), 3)
  self.assertEqual(self._issue_tracker.calls[2]['method'], 'AddBugComment')
  self.assertEqual(len(self._issue_tracker.calls[2]['args']), 2)
  self.assertEqual(self._issue_tracker.calls[2]['args'][0], 42)
  self.assertNotIn('was automatically merged into',
                   self._issue_tracker.calls[2]['args'][1])
  self.assertIn('Alert group updated:',
                self._issue_tracker.calls[2]['args'][1])
  self.assertEqual(
      self._issue_tracker.calls[2]['kwargs'], {
          'summary':
              '[%s]: %d regressions in %s' % ('sheriff', 3, 'test_suite'),
          'labels': [
              'Type-Bug-Regression', 'Chromeperf-Auto-Triaged',
              'Restrict-View-Google', 'Pri-2'
          ],
          'cc_list': [],
          'components': ['Foo>Bar'],
          'project':
              'chromium',
          'send_email':
              False
      })
  # Anomalies keep pointing at the duplicate bug (42); no canonical link.
  self.assertTrue(all(a.get().bug_id == 42 for a in all_anomalies))
  self.assertIsNone(group.get().canonical_group)
  self.assertEqual(group.get().status, alert_group.AlertGroup.Status.closed)
  def testAutoMerge_NoCanonicalIssue(self):
    """Auto-merge is enabled but the update carries no canonical group.

    With nothing to merge into, the workflow must only refresh the existing
    bug (id 42, the first one NewBug hands out): a single AddBugComment with
    'Alert group updated:' and no merge message, all anomalies assigned to
    bug 42, no canonical_group link, and the group left in 'triaged'.
    """
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(
                name='sheriff', auto_triage_enable=True, auto_merge_enable=True)
        ],
    }
    self._issue_tracker._bug_id_counter = 42
    issue = self._issue_tracker.GetIssue(self._issue_tracker.NewBug()['bug_id'])
    grouped_anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    # One anomaly beyond the group's current membership, to exercise the
    # update path that picks up new anomalies.
    all_anomalies = grouped_anomalies + [self._AddAnomaly()]
    group = self._AddAlertGroup(
        grouped_anomalies[0],
        issue=issue,
        anomalies=grouped_anomalies,
        status=alert_group.AlertGroup.Status.triaged,
    )
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        config=alert_group_workflow.AlertGroupWorkflow.Config(
            active_window=datetime.timedelta(days=7),
            triage_delay=datetime.timedelta(hours=0),
        ),
    )
    u = alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
        now=datetime.datetime.utcnow(),
        anomalies=ndb.get_multi(all_anomalies),
        issue=issue,
    )
    self._UpdateTwice(workflow=w, update=u)
    # First one is NewBug call in the test itself.
    self.assertEqual(len(self._issue_tracker.calls), 2)
    self.assertEqual(self._issue_tracker.calls[1]['method'], 'AddBugComment')
    self.assertEqual(len(self._issue_tracker.calls[1]['args']), 2)
    self.assertEqual(self._issue_tracker.calls[1]['args'][0], 42)
    # No canonical group was supplied, so no merge message may appear.
    self.assertNotIn('was automatically merged into',
                     self._issue_tracker.calls[1]['args'][1])
    self.assertIn('Alert group updated:',
                  self._issue_tracker.calls[1]['args'][1])
    self.assertEqual(
        self._issue_tracker.calls[1]['kwargs'], {
            'summary':
                '[%s]: %d regressions in %s' % ('sheriff', 3, 'test_suite'),
            'labels': [
                'Type-Bug-Regression', 'Chromeperf-Auto-Triaged',
                'Restrict-View-Google', 'Pri-2'
            ],
            'cc_list': [],
            'components': ['Foo>Bar'],
            'project':
                'chromium',
            'send_email':
                False
        })
    self.assertTrue(all(a.get().bug_id == 42 for a in all_anomalies))
    self.assertIsNone(group.get().canonical_group)
    self.assertEqual(group.get().status, alert_group.AlertGroup.Status.triaged)
  def testAutoMerge_SucessfulMerge_AutoMergeForOneAnomaly(self):
    """Merge applies only to anomalies under an auto_merge subscription.

    Two subscriptions match different test suites; only '*auto_merge*' has
    auto_merge_enable. After processing, the auto_merge anomaly is moved to
    the canonical bug (43), the 'regular' anomalies stay on the duplicate
    bug (42), the group is linked to the canonical group, and closed.
    """
    self._sheriff_config.patterns = {
        '*auto_merge*': [
            subscription.Subscription(
                name='sheriff', auto_triage_enable=True, auto_merge_enable=True)
        ],
        '*regular*': [
            subscription.Subscription(name='sheriff', auto_triage_enable=True)
        ],
    }
    self._issue_tracker._bug_id_counter = 42
    # Bug 42 is the duplicate, bug 43 the canonical target.
    duplicate_issue = self._issue_tracker.GetIssue(
        self._issue_tracker.NewBug(status='Duplicate',
                                   state='closed')['bug_id'])
    canonical_issue = self._issue_tracker.GetIssue(
        self._issue_tracker.NewBug()['bug_id'])
    grouped_anomalies = [
        self._AddAnomaly(test='master/bot/regular_suite/measurement'),
        self._AddAnomaly(test='master/bot/auto_merge_suite/measurement')
    ]
    all_anomalies = grouped_anomalies + [
        self._AddAnomaly(test='master/bot/regular_suite/measurement'),
    ]
    group = self._AddAlertGroup(
        grouped_anomalies[0],
        issue=duplicate_issue,
        anomalies=grouped_anomalies,
        status=alert_group.AlertGroup.Status.triaged,
    )
    canonical_group = self._AddAlertGroup(
        grouped_anomalies[0],
        issue=canonical_issue,
        status=alert_group.AlertGroup.Status.triaged,
    )
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        config=alert_group_workflow.AlertGroupWorkflow.Config(
            active_window=datetime.timedelta(days=7),
            triage_delay=datetime.timedelta(hours=0),
        ),
    )
    u = alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
        now=datetime.datetime.utcnow(),
        anomalies=ndb.get_multi(all_anomalies),
        issue=duplicate_issue,
        canonical_group=canonical_group.get(),
    )
    w.Process(update=u)
    # First two are NewBug calls in the test itself.
    self.assertEqual(len(self._issue_tracker.calls), 4)
    self.assertEqual(self._issue_tracker.calls[2]['method'], 'AddBugComment')
    self.assertEqual(len(self._issue_tracker.calls[2]['args']), 2)
    self.assertEqual(self._issue_tracker.calls[2]['args'][0], 42)
    self.assertIn(
        '(%s) was automatically merged into %s' %
        (group.string_id(), canonical_group.string_id()),
        self._issue_tracker.calls[2]['args'][1])
    self.assertEqual(self._issue_tracker.calls[2]['kwargs'], {
        'project': 'chromium',
        'send_email': False
    })
    # The follow-up comment updates the duplicate bug's summary; the count
    # reflects the regular_suite anomalies that stay behind.
    self.assertEqual(
        self._issue_tracker.calls[3], {
            'method': 'AddBugComment',
            'args': (42, None),
            'kwargs': {
                'summary':
                    '[%s]: %d regressions in %s' %
                    ('sheriff', 3, 'regular_suite'),
                'labels': [
                    'Type-Bug-Regression', 'Chromeperf-Auto-Triaged',
                    'Restrict-View-Google', 'Pri-2'
                ],
                'cc_list': [],
                'components': ['Foo>Bar'],
                'project':
                    'chromium',
                'send_email':
                    False
            },
        })
    self.assertEqual(all_anomalies[0].get().bug_id, 42)
    self.assertEqual(all_anomalies[1].get().bug_id, 43)
    self.assertEqual(all_anomalies[2].get().bug_id, 42)
    self.assertEqual(group.get().canonical_group, canonical_group)
    self.assertEqual(group.get().status, alert_group.AlertGroup.Status.closed)
  def testAutoMerge_SucessfulMerge_NoNewAnomalies(self):
    """Merge into a canonical group when the update adds no new anomalies.

    The only extra issue-tracker call is the merge comment on bug 42; the
    grouped anomalies are all reassigned to the canonical bug (43), the
    group is linked to the canonical group, and closed.
    """
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(
                name='sheriff', auto_triage_enable=True, auto_merge_enable=True)
        ],
    }
    self._issue_tracker._bug_id_counter = 42
    duplicate_issue = self._issue_tracker.GetIssue(
        self._issue_tracker.NewBug(status='Duplicate',
                                   state='closed')['bug_id'])
    canonical_issue = self._issue_tracker.GetIssue(
        self._issue_tracker.NewBug()['bug_id'])
    grouped_anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    group = self._AddAlertGroup(
        grouped_anomalies[0],
        issue=duplicate_issue,
        anomalies=grouped_anomalies,
        status=alert_group.AlertGroup.Status.triaged,
    )
    canonical_group = self._AddAlertGroup(
        grouped_anomalies[0],
        issue=canonical_issue,
        status=alert_group.AlertGroup.Status.triaged,
    )
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        config=alert_group_workflow.AlertGroupWorkflow.Config(
            active_window=datetime.timedelta(days=7),
            triage_delay=datetime.timedelta(hours=0),
        ),
    )
    u = alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
        now=datetime.datetime.utcnow(),
        anomalies=ndb.get_multi(grouped_anomalies),
        issue=duplicate_issue,
        canonical_group=canonical_group.get(),
    )
    w.Process(update=u)
    # First two are NewBug calls in the test itself.
    self.assertEqual(len(self._issue_tracker.calls), 3)
    self.assertEqual(self._issue_tracker.calls[2]['method'], 'AddBugComment')
    self.assertEqual(len(self._issue_tracker.calls[2]['args']), 2)
    self.assertEqual(self._issue_tracker.calls[2]['args'][0], 42)
    self.assertIn(
        '(%s) was automatically merged into %s' %
        (group.string_id(), canonical_group.string_id()),
        self._issue_tracker.calls[2]['args'][1])
    self.assertEqual(self._issue_tracker.calls[2]['kwargs'], {
        'project': 'chromium',
        'send_email': False
    })
    self.assertTrue(all(a.get().bug_id == 43 for a in grouped_anomalies))
    self.assertEqual(group.get().canonical_group, canonical_group)
    self.assertEqual(group.get().status, alert_group.AlertGroup.Status.closed)
  def testAutoMerge_SeparatingGroups(self):
    """A previously merged group is separated when the merge no longer holds.

    The group starts closed with a canonical_group link; the update reports
    canonical_group=None. Processing must clear the link, move the group
    back to 'triaged', and post only a regular update comment on bug 42.
    """
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(
                name='sheriff', auto_triage_enable=True, auto_merge_enable=True)
        ],
    }
    self._issue_tracker._bug_id_counter = 42
    duplicate_issue = self._issue_tracker.GetIssue(
        self._issue_tracker.NewBug()['bug_id'])
    grouped_anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    all_anomalies = grouped_anomalies + [self._AddAnomaly()]
    canonical_group = self._AddAlertGroup(
        grouped_anomalies[0],
        status=alert_group.AlertGroup.Status.triaged,
    )
    group = self._AddAlertGroup(
        grouped_anomalies[0],
        issue=duplicate_issue,
        anomalies=grouped_anomalies,
        status=alert_group.AlertGroup.Status.closed,
        canonical_group=canonical_group)
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        config=alert_group_workflow.AlertGroupWorkflow.Config(
            active_window=datetime.timedelta(days=7),
            triage_delay=datetime.timedelta(hours=0),
        ),
    )
    u = alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
        now=datetime.datetime.utcnow(),
        anomalies=ndb.get_multi(all_anomalies),
        issue=duplicate_issue,
        canonical_group=None,
    )
    w.Process(update=u)
    # First one is NewBug calls in the test itself.
    self.assertEqual(len(self._issue_tracker.calls), 2)
    self.assertEqual(self._issue_tracker.calls[1]['method'], 'AddBugComment')
    self.assertEqual(len(self._issue_tracker.calls[1]['args']), 2)
    self.assertEqual(self._issue_tracker.calls[1]['args'][0], 42)
    self.assertIn('Alert group updated:',
                  self._issue_tracker.calls[1]['args'][1])
    self.assertIsNone(group.get().canonical_group)
    self.assertEqual(group.get().status, alert_group.AlertGroup.Status.triaged)
  def testPrepareGroupUpdate_DuplicateGroupFound(self):
    """_PrepareGroupUpdate on a canonical group also gathers duplicates.

    The update's anomaly list must contain the canonical group's own
    anomalies followed by those of the group marked as its duplicate, and
    the canonical group itself resolves to no further canonical_group.
    """
    base_anomaly = self._AddAnomaly()
    self._issue_tracker._bug_id_counter = 42
    canonical_issue = self._issue_tracker.GetIssue(
        self._issue_tracker.NewBug()['bug_id'])
    canonical_group = self._AddAlertGroup(
        base_anomaly,
        issue=canonical_issue,
        status=alert_group.AlertGroup.Status.triaged,
    )
    canonical_anomalies = [
        self._AddAnomaly(groups=[canonical_group]),
        self._AddAnomaly(groups=[canonical_group])
    ]
    duplicate_issue = self._issue_tracker.GetIssue(
        self._issue_tracker.NewBug(status='Duplicate',
                                   state='closed')['bug_id'])
    duplicate_group = self._AddAlertGroup(
        base_anomaly,
        issue=duplicate_issue,
        status=alert_group.AlertGroup.Status.triaged,
        canonical_group=canonical_group,
    )
    duplicate_anomalies = [
        self._AddAnomaly(groups=[duplicate_group]),
        self._AddAnomaly(groups=[duplicate_group])
    ]
    w = alert_group_workflow.AlertGroupWorkflow(
        canonical_group.get(),
        issue_tracker=self._issue_tracker,
        config=alert_group_workflow.AlertGroupWorkflow.Config(
            active_window=datetime.timedelta(days=7),
            triage_delay=datetime.timedelta(hours=0),
        ),
    )
    update = w._PrepareGroupUpdate()
    self.assertEqual(
        update.anomalies,
        [a.get() for a in canonical_anomalies + duplicate_anomalies])
    self.assertIsNotNone(update.issue)
    self.assertIsNone(update.canonical_group)
  def testPrepareGroupUpdate_CanonicalGroupFound(self):
    """A 'Duplicate/mergedInto' issue comment resolves the canonical group.

    The duplicate group's issue carries a newest-first comment list whose
    latest update marks it Duplicate with mergedInto pointing at the
    canonical issue; _PrepareGroupUpdate must resolve that to the canonical
    group entity.
    """
    base_anomaly = self._AddAnomaly()
    self._issue_tracker._bug_id_counter = 42
    duplicate_issue = self._issue_tracker.GetIssue(
        self._issue_tracker.NewBug(status='Duplicate',
                                   state='closed')['bug_id'])
    duplicate_group = self._AddAlertGroup(
        base_anomaly,
        issue=duplicate_issue,
        status=alert_group.AlertGroup.Status.triaged,
    )
    anomalies = [
        self._AddAnomaly(groups=[duplicate_group]),
        self._AddAnomaly(groups=[duplicate_group])
    ]
    canonical_issue = self._issue_tracker.GetIssue(
        self._issue_tracker.NewBug()['bug_id'])
    canonical_group = self._AddAlertGroup(
        base_anomaly,
        issue=canonical_issue,
        status=alert_group.AlertGroup.Status.triaged,
    )
    # Two comments: the older WontFix must be ignored in favour of the
    # later Duplicate/mergedInto update.
    self._issue_tracker.issue_comments.update({
        ('chromium', duplicate_issue['id']): [
            {
                'id': 2,
                'updates': {
                    'status': 'Duplicate',
                    # According to Monorail API documentation, mergedInto
                    # has string type.
                    'mergedInto': str(canonical_issue['id'])
                },
            },
            {
                'id': 1,
                'updates': {
                    'status': 'WontFix'
                },
            }
        ]
    })
    w = alert_group_workflow.AlertGroupWorkflow(
        duplicate_group.get(),
        issue_tracker=self._issue_tracker,
        config=alert_group_workflow.AlertGroupWorkflow.Config(
            active_window=datetime.timedelta(days=7),
            triage_delay=datetime.timedelta(hours=0),
        ),
    )
    update = w._PrepareGroupUpdate()
    self.assertEqual(update.anomalies, [a.get() for a in anomalies])
    self.assertIsNotNone(update.issue)
    self.assertEqual(update.canonical_group, canonical_group.get())
  def testPrepareGroupUpdate_CanonicalGroupLoop(self):
    """A cycle in the canonical-group chain must not be followed.

    Groups are wired duplicate -> canonical -> looped -> duplicate; when the
    mergedInto comment points into that cycle, _PrepareGroupUpdate must
    refuse to pick a canonical group and return None.
    """
    base_anomaly = self._AddAnomaly()
    self._issue_tracker._bug_id_counter = 42
    duplicate_issue = self._issue_tracker.GetIssue(
        self._issue_tracker.NewBug(status='Duplicate',
                                   state='closed')['bug_id'])
    duplicate_group = self._AddAlertGroup(
        base_anomaly,
        issue=duplicate_issue,
        status=alert_group.AlertGroup.Status.triaged,
    )
    looped_issue = self._issue_tracker.GetIssue(
        self._issue_tracker.NewBug()['bug_id'])
    looped_group = self._AddAlertGroup(
        base_anomaly,
        issue=looped_issue,
        status=alert_group.AlertGroup.Status.triaged,
        canonical_group=duplicate_group,
    )
    canonical_issue = self._issue_tracker.GetIssue(
        self._issue_tracker.NewBug()['bug_id'])
    self._AddAlertGroup(
        base_anomaly,
        issue=canonical_issue,
        status=alert_group.AlertGroup.Status.triaged,
        canonical_group=looped_group,
    )
    self._issue_tracker.issue_comments.update({
        ('chromium', duplicate_issue['id']): [{
            'id': 2,
            'updates': {
                'status': 'Duplicate',
                # According to Monorail API documentation, mergedInto
                # has string type.
                'mergedInto': str(canonical_issue['id'])
            },
        }]
    })
    w = alert_group_workflow.AlertGroupWorkflow(
        duplicate_group.get(),
        issue_tracker=self._issue_tracker,
        config=alert_group_workflow.AlertGroupWorkflow.Config(
            active_window=datetime.timedelta(days=7),
            triage_delay=datetime.timedelta(hours=0),
        ),
    )
    update = w._PrepareGroupUpdate()
    self.assertIsNone(update.canonical_group)
| 34.727512
| 99
| 0.629106
| 8,001
| 79,144
| 5.936508
| 0.053868
| 0.078824
| 0.089941
| 0.078066
| 0.887911
| 0.874521
| 0.863678
| 0.854162
| 0.847657
| 0.84132
| 0
| 0.008823
| 0.258213
| 79,144
| 2,278
| 100
| 34.742757
| 0.800232
| 0.02076
| 0
| 0.775607
| 0
| 0.000953
| 0.069746
| 0.020099
| 0
| 0
| 0
| 0.000439
| 0.080991
| 1
| 0.024297
| false
| 0
| 0.00667
| 0
| 0.033826
| 0.000476
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6abc784b678877e65c72b6b4b691f8fb993e0ae2
| 62,125
|
py
|
Python
|
entsoe/entsoe.py
|
lpirl/entsoe-py
|
edc9f08e8ce1622023498696435765a0b47d6964
|
[
"MIT"
] | null | null | null |
entsoe/entsoe.py
|
lpirl/entsoe-py
|
edc9f08e8ce1622023498696435765a0b47d6964
|
[
"MIT"
] | null | null | null |
entsoe/entsoe.py
|
lpirl/entsoe-py
|
edc9f08e8ce1622023498696435765a0b47d6964
|
[
"MIT"
] | null | null | null |
import logging
from typing import Union, Optional, Dict
import pandas as pd
from pandas.tseries.offsets import YearBegin, YearEnd
import pytz
import requests
from bs4 import BeautifulSoup
from entsoe.exceptions import InvalidPSRTypeError, InvalidBusinessParameterError
from .exceptions import NoMatchingDataError, PaginationError
from .mappings import Area, NEIGHBOURS, lookup_area
from .parsers import parse_prices, parse_loads, parse_generation, \
parse_installed_capacity_per_plant, parse_crossborder_flows, \
parse_unavailabilities, parse_contracted_reserve, parse_imbalance_prices_zip, \
parse_imbalance_volumes_zip, parse_netpositions, parse_procured_balancing_capacity
from .decorators import retry, paginated, year_limited, day_limited, documents_limited
# Package metadata.
__title__ = "entsoe-py"
__version__ = "0.4.4"
__author__ = "EnergieID.be"
__license__ = "MIT"
# Base URL of the ENTSO-E transparency platform REST API; every request in
# this module is a GET against this single endpoint.
URL = 'https://transparency.entsoe.eu/api'
class EntsoeRawClient:
# noinspection LongLine
"""
Client to perform API calls and return the raw responses API-documentation:
https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html#_request_methods
Attributions: Parts of the code for parsing Entsoe responses were copied
from https://github.com/tmrowco/electricitymap
"""
def __init__(
self, api_key: str, session: Optional[requests.Session] = None,
retry_count: int = 1, retry_delay: int = 0,
proxies: Optional[Dict] = None, timeout: Optional[int] = None):
"""
Parameters
----------
api_key : str
session : requests.Session
retry_count : int
number of times to retry the call if the connection fails
retry_delay: int
amount of seconds to wait between retries
proxies : dict
requests proxies
timeout : int
"""
if api_key is None:
raise TypeError("API key cannot be None")
self.api_key = api_key
if session is None:
session = requests.Session()
self.session = session
self.proxies = proxies
self.retry_count = retry_count
self.retry_delay = retry_delay
self.timeout = timeout
@retry
def _base_request(self, params: Dict, start: pd.Timestamp,
end: pd.Timestamp) -> requests.Response:
"""
Parameters
----------
params : dict
start : pd.Timestamp
end : pd.Timestamp
Returns
-------
requests.Response
"""
start_str = self._datetime_to_str(start)
end_str = self._datetime_to_str(end)
base_params = {
'securityToken': self.api_key,
'periodStart': start_str,
'periodEnd': end_str
}
params.update(base_params)
logging.debug(f'Performing request to {URL} with params {params}')
response = self.session.get(url=URL, params=params,
proxies=self.proxies, timeout=self.timeout)
try:
response.raise_for_status()
except requests.HTTPError as e:
soup = BeautifulSoup(response.text, 'html.parser')
text = soup.find_all('text')
if len(text):
error_text = soup.find('text').text
if 'No matching data found' in error_text:
raise NoMatchingDataError
elif "check you request against dependency tables" in error_text:
raise InvalidBusinessParameterError
elif "is not valid for this area" in error_text:
raise InvalidPSRTypeError
elif 'amount of requested data exceeds allowed limit' in error_text:
requested = error_text.split(' ')[-2]
allowed = error_text.split(' ')[-5]
raise PaginationError(
f"The API is limited to {allowed} elements per "
f"request. This query requested for {requested} "
f"documents and cannot be fulfilled as is.")
elif 'requested data to be gathered via the offset parameter exceeds the allowed limit' in error_text:
requested = error_text.split(' ')[-9]
allowed = error_text.split(' ')[-30][:-2]
raise PaginationError(
f"The API is limited to {allowed} elements per "
f"request. This query requested for {requested} "
f"documents and cannot be fulfilled as is.")
raise e
else:
# ENTSO-E has changed their server to also respond with 200 if there is no data but all parameters are valid
# this means we need to check the contents for this error even when status code 200 is returned
# to prevent parsing the full response do a text matching instead of full parsing
# also only do this when response type content is text and not for example a zip file
if response.headers.get('content-type', '') == 'application/xml':
if 'No matching data found' in response.text:
raise NoMatchingDataError
return response
@staticmethod
def _datetime_to_str(dtm: pd.Timestamp) -> str:
"""
Convert a datetime object to a string in UTC
of the form YYYYMMDDhh00
Parameters
----------
dtm : pd.Timestamp
Recommended to use a timezone-aware object!
If timezone-naive, UTC is assumed
Returns
-------
str
"""
if dtm.tzinfo is not None and dtm.tzinfo != pytz.UTC:
dtm = dtm.tz_convert("UTC")
fmt = '%Y%m%d%H00'
ret_str = dtm.strftime(fmt)
return ret_str
def query_day_ahead_prices(self, country_code: Union[Area, str],
start: pd.Timestamp, end: pd.Timestamp) -> str:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
Returns
-------
str
"""
area = lookup_area(country_code)
params = {
'documentType': 'A44',
'in_Domain': area.code,
'out_Domain': area.code
}
response = self._base_request(params=params, start=start, end=end)
return response.text
def query_net_position_dayahead(self, country_code: Union[Area, str],
start: pd.Timestamp, end: pd.Timestamp) -> str:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
Returns
-------
str
"""
area = lookup_area(country_code)
params = {
'documentType': 'A25', # Allocation result document
'businessType': 'B09', # net position
'Contract_MarketAgreement.Type': 'A01', # daily
'in_Domain': area.code,
'out_Domain': area.code
}
response = self._base_request(params=params, start=start, end=end)
return response.text
def query_load(self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp) -> str:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
Returns
-------
str
"""
area = lookup_area(country_code)
params = {
'documentType': 'A65',
'processType': 'A16',
'outBiddingZone_Domain': area.code,
'out_Domain': area.code
}
response = self._base_request(params=params, start=start, end=end)
return response.text
def query_load_forecast(
self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, process_type: str = 'A01') -> str:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
process_type : str
Returns
-------
str
"""
area = lookup_area(country_code)
params = {
'documentType': 'A65',
'processType': process_type,
'outBiddingZone_Domain': area.code,
# 'out_Domain': domain
}
response = self._base_request(params=params, start=start, end=end)
return response.text
def query_generation_forecast(
self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, process_type: str = 'A01') -> str:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
process_type : str
Returns
-------
str
"""
area = lookup_area(country_code)
params = {
'documentType': 'A71',
'processType': process_type,
'in_Domain': area.code,
}
response = self._base_request(params=params, start=start, end=end)
return response.text
def query_wind_and_solar_forecast(
self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, psr_type: Optional[str] = None,
process_type: str = 'A01', **kwargs) -> str:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
psr_type : str
filter on a single psr type
process_type : str
Returns
-------
str
"""
area = lookup_area(country_code)
params = {
'documentType': 'A69',
'processType': process_type,
'in_Domain': area.code,
}
if psr_type:
params.update({'psrType': psr_type})
response = self._base_request(params=params, start=start, end=end)
return response.text
def query_generation(
self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, psr_type: Optional[str] = None, **kwargs) -> str:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
psr_type : str
filter on a single psr type
Returns
-------
str
"""
area = lookup_area(country_code)
params = {
'documentType': 'A75',
'processType': 'A16',
'in_Domain': area.code,
}
if psr_type:
params.update({'psrType': psr_type})
response = self._base_request(params=params, start=start, end=end)
return response.text
def query_generation_per_plant(
self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, psr_type: Optional[str] = None, **kwargs) -> str:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
psr_type : str
filter on a single psr type
Returns
-------
str
"""
area = lookup_area(country_code)
params = {
'documentType': 'A73',
'processType': 'A16',
'in_Domain': area.code,
}
if psr_type:
params.update({'psrType': psr_type})
response = self._base_request(params=params, start=start, end=end)
return response.text
def query_installed_generation_capacity(
self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, psr_type: Optional[str] = None) -> str:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
psr_type : str
filter query for a specific psr type
Returns
-------
str
"""
area = lookup_area(country_code)
params = {
'documentType': 'A68',
'processType': 'A33',
'in_Domain': area.code,
}
if psr_type:
params.update({'psrType': psr_type})
response = self._base_request(params=params, start=start, end=end)
return response.text
def query_installed_generation_capacity_per_unit(
self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, psr_type: Optional[str] = None) -> str:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
psr_type : str
filter query for a specific psr type
Returns
-------
str
"""
area = lookup_area(country_code)
params = {
'documentType': 'A71',
'processType': 'A33',
'in_Domain': area.code,
}
if psr_type:
params.update({'psrType': psr_type})
response = self._base_request(params=params, start=start, end=end)
return response.text
def query_crossborder_flows(
self, country_code_from: Union[Area, str],
country_code_to: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, **kwargs) -> str:
"""
Parameters
----------
country_code_from : Area|str
country_code_to : Area|str
start : pd.Timestamp
end : pd.Timestamp
Returns
-------
str
"""
return self._query_crossborder(
country_code_from=country_code_from,
country_code_to=country_code_to, start=start, end=end,
doctype="A11", contract_marketagreement_type=None)
def query_scheduled_exchanges(
self, country_code_from: Union[Area, str],
country_code_to: Union[Area, str],
start: pd.Timestamp,
end: pd.Timestamp,
dayahead: bool = False,
**kwargs) -> str:
"""
Parameters
----------
country_code_from : Area|str
country_code_to : Area|str
dayahead : bool
start : pd.Timestamp
end : pd.Timestamp
Returns
-------
str
"""
if dayahead:
contract_marketagreement_type = "A01"
else:
contract_marketagreement_type = "A05"
return self._query_crossborder(
country_code_from=country_code_from,
country_code_to=country_code_to, start=start, end=end,
doctype="A09", contract_marketagreement_type=contract_marketagreement_type)
def query_net_transfer_capacity_dayahead(
self, country_code_from: Union[Area, str],
country_code_to: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp) -> str:
"""
Parameters
----------
country_code_from : Area|str
country_code_to : Area|str
start : pd.Timestamp
end : pd.Timestamp
Returns
-------
str
"""
return self._query_crossborder(
country_code_from=country_code_from,
country_code_to=country_code_to, start=start, end=end,
doctype="A61", contract_marketagreement_type="A01")
def query_net_transfer_capacity_weekahead(
self, country_code_from: Union[Area, str],
country_code_to: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp) -> str:
"""
Parameters
----------
country_code_from : Area|str
country_code_to : Area|str
start : pd.Timestamp
end : pd.Timestamp
Returns
-------
str
"""
return self._query_crossborder(
country_code_from=country_code_from,
country_code_to=country_code_to, start=start, end=end,
doctype="A61", contract_marketagreement_type="A02")
def query_net_transfer_capacity_monthahead(
self, country_code_from: Union[Area, str],
country_code_to: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp) -> str:
"""
Parameters
----------
country_code_from : Area|str
country_code_to : Area|str
start : pd.Timestamp
end : pd.Timestamp
Returns
-------
str
"""
return self._query_crossborder(
country_code_from=country_code_from,
country_code_to=country_code_to, start=start, end=end,
doctype="A61", contract_marketagreement_type="A03")
def query_net_transfer_capacity_yearahead(
self, country_code_from: Union[Area, str],
country_code_to: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp) -> str:
"""
Parameters
----------
country_code_from : Area|str
country_code_to : Area|str
start : pd.Timestamp
end : pd.Timestamp
Returns
-------
str
"""
return self._query_crossborder(
country_code_from=country_code_from,
country_code_to=country_code_to, start=start, end=end,
doctype="A61", contract_marketagreement_type="A04")
def query_intraday_offered_capacity(
self, country_code_from: Union[Area, str],
country_code_to: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, implicit:bool = True,**kwargs) -> str:
"""
Parameters
----------
country_code_from : Area|str
country_code_to : Area|str
start : pd.Timestamp
end : pd.Timestamp
implicit: bool (True = implicit - default for most borders. False = explicit - for instance BE-GB)
Returns
-------
str
"""
return self._query_crossborder(
country_code_from=country_code_from,
country_code_to=country_code_to, start=start, end=end,
doctype="A31", contract_marketagreement_type="A07",
auction_type=("A01" if implicit==True else "A02"))
def _query_crossborder(
self, country_code_from: Union[Area, str],
country_code_to: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, doctype: str,
contract_marketagreement_type: Optional[str] = None,
auction_type: Optional[str] = None) -> str:
"""
Generic function called by query_crossborder_flows,
query_scheduled_exchanges, query_net_transfer_capacity_DA/WA/MA/YA and query_.
Parameters
----------
country_code_from : Area|str
country_code_to : Area|str
start : pd.Timestamp
end : pd.Timestamp
doctype: str
contract_marketagreement_type: str
Returns
-------
str
"""
area_in = lookup_area(country_code_to)
area_out = lookup_area(country_code_from)
params = {
'documentType': doctype,
'in_Domain': area_in.code,
'out_Domain': area_out.code
}
if contract_marketagreement_type is not None:
params[
'contract_MarketAgreement.Type'] = contract_marketagreement_type
if auction_type is not None:
params[
'Auction.Type'] = auction_type
response = self._base_request(params=params, start=start, end=end)
return response.text
def query_imbalance_prices(
self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, psr_type: Optional[str] = None) -> bytes:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
psr_type : str
filter query for a specific psr type
Returns
-------
bytes
"""
area = lookup_area(country_code)
params = {
'documentType': 'A85',
'controlArea_Domain': area.code,
}
if psr_type:
params.update({'psrType': psr_type})
response = self._base_request(params=params, start=start, end=end)
return response.content
def query_imbalance_volumes(
self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, psr_type: Optional[str] = None) -> bytes:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
psr_type : str
filter query for a specific psr type
Returns
-------
bytes
"""
area = lookup_area(country_code)
params = {
'documentType': 'A86',
'controlArea_Domain': area.code,
}
if psr_type:
params.update({'psrType': psr_type})
response = self._base_request(params=params, start=start, end=end)
return response.content
def query_procured_balancing_capacity(
self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, process_type: str,
type_marketagreement_type: Optional[str] = None) -> bytes:
"""
Activated Balancing Energy [17.1.E]
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
process_type : str
A51 ... aFRR; A47 ... mFRR
type_marketagreement_type : str
type of contract (see mappings.MARKETAGREEMENTTYPE)
Returns
-------
bytes
"""
if process_type not in ['A51', 'A47']:
raise ValueError('processType allowed values: A51, A47')
area = lookup_area(country_code)
params = {
'documentType': 'A15',
'area_Domain': area.code,
'processType': process_type
}
if type_marketagreement_type:
params.update({'type_MarketAgreement.Type': type_marketagreement_type})
response = self._base_request(params=params, start=start, end=end)
return response.content
def query_activated_balancing_energy(
self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, business_type: str,
psr_type: Optional[str] = None) -> bytes:
"""
Activated Balancing Energy [17.1.E]
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
business_type : str
type of contract (see mappings.BSNTYPE)
psr_type : str
filter query for a specific psr type
Returns
-------
bytes
"""
area = lookup_area(country_code)
params = {
'documentType': 'A83',
'controlArea_Domain': area.code,
'businessType': business_type
}
if psr_type:
params.update({'psrType': psr_type})
response = self._base_request(params=params, start=start, end=end)
return response.content
def query_contracted_reserve_prices(
self, country_code: Union[Area, str], start: pd.Timestamp,
end: pd.Timestamp, type_marketagreement_type: str,
psr_type: Optional[str] = None,
offset: int = 0) -> str:
"""
Parameters
----------
country_code : Area|str
start : pd.Timestamp
end : pd.Timestamp
type_marketagreement_type : str
type of contract (see mappings.MARKETAGREEMENTTYPE)
psr_type : str
filter query for a specific psr type
offset : int
offset for querying more than 100 documents
Returns
-------
str
"""
area = lookup_area(country_code)
params = {
'documentType': 'A89',
'controlArea_Domain': area.code,
'type_MarketAgreement.Type': type_marketagreement_type,
'offset': offset
}
if psr_type:
params.update({'psrType': psr_type})
response = self._base_request(params=params, start=start, end=end)
return response.text
def query_contracted_reserve_amount(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, type_marketagreement_type: str,
        psr_type: Optional[str] = None,
        offset: int = 0) -> str:
    """
    Contracted reserve amounts.

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    type_marketagreement_type : str
        type of contract (see mappings.MARKETAGREEMENTTYPE)
    psr_type : str
        filter query for a specific psr type
    offset : int
        offset for querying more than 100 documents

    Returns
    -------
    str
    """
    domain = lookup_area(country_code)
    query_params = {
        'documentType': 'A81',
        'controlArea_Domain': domain.code,
        'type_MarketAgreement.Type': type_marketagreement_type,
        'offset': offset,
    }
    if psr_type:
        query_params['psrType'] = psr_type
    resp = self._base_request(params=query_params, start=start, end=end)
    return resp.text
def _query_unavailability(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, doctype: str, docstatus: Optional[str] = None,
        periodstartupdate: Optional[pd.Timestamp] = None,
        periodendupdate: Optional[pd.Timestamp] = None,
        offset: int = 0) -> bytes:
    """
    Generic unavailability query method.

    This endpoint serves ZIP files and is limited to 200 items per request.

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    doctype : str
    docstatus : str, optional
    periodstartupdate : pd.Timestamp, optional
    periodendupdate : pd.Timestamp, optional
    offset : int

    Returns
    -------
    bytes
    """
    domain = lookup_area(country_code)
    # A possible extra filter would be 'businessType': A53 (unplanned) / A54 (planned)
    query_params = {
        'documentType': doctype,
        'biddingZone_domain': domain.code,
        'offset': offset,
    }
    if docstatus:
        query_params['docStatus'] = docstatus
    # Both update bounds must be given for the period-update filter to apply.
    if periodstartupdate and periodendupdate:
        query_params['periodStartUpdate'] = self._datetime_to_str(
            periodstartupdate)
        query_params['periodEndUpdate'] = self._datetime_to_str(
            periodendupdate)
    resp = self._base_request(params=query_params, start=start, end=end)
    return resp.content
def query_unavailability_of_generation_units(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, docstatus: Optional[str] = None,
        periodstartupdate: Optional[pd.Timestamp] = None,
        periodendupdate: Optional[pd.Timestamp] = None,
        offset: int = 0) -> bytes:
    """
    Unavailability of generation units (document type A80).

    This endpoint serves ZIP files and is limited to 200 items per request.

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    docstatus : str, optional
    periodstartupdate : pd.Timestamp, optional
    periodendupdate : pd.Timestamp, optional
    offset : int

    Returns
    -------
    bytes
    """
    return self._query_unavailability(
        country_code=country_code, start=start, end=end, doctype="A80",
        docstatus=docstatus, periodstartupdate=periodstartupdate,
        periodendupdate=periodendupdate, offset=offset)
def query_unavailability_of_production_units(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, docstatus: Optional[str] = None,
        periodstartupdate: Optional[pd.Timestamp] = None,
        periodendupdate: Optional[pd.Timestamp] = None,
        offset: int = 0) -> bytes:
    """
    Unavailability of production units (document type A77).

    This endpoint serves ZIP files.
    The query is limited to 200 items per request.

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    docstatus : str, optional
    periodstartupdate : pd.Timestamp, optional
    periodendupdate : pd.Timestamp, optional
    offset : int
        offset for querying more than 200 documents

    Returns
    -------
    bytes
    """
    # `offset` added for consistency with query_unavailability_of_generation_units;
    # default 0 keeps the previous behaviour for existing callers.
    content = self._query_unavailability(
        country_code=country_code, start=start, end=end, doctype="A77",
        docstatus=docstatus, periodstartupdate=periodstartupdate,
        periodendupdate=periodendupdate, offset=offset)
    return content
def query_unavailability_transmission(
        self, country_code_from: Union[Area, str],
        country_code_to: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, docstatus: Optional[str] = None,
        periodstartupdate: Optional[pd.Timestamp] = None,
        periodendupdate: Optional[pd.Timestamp] = None,
        offset: int = 0,
        **kwargs) -> bytes:
    """
    Unavailability of transmission infrastructure (document type A78).

    This endpoint serves ZIP files and is limited to 200 items per request.

    Parameters
    ----------
    country_code_from : Area|str
    country_code_to : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    docstatus : str, optional
    periodstartupdate : pd.Timestamp, optional
    periodendupdate : pd.Timestamp, optional
    offset : int

    Returns
    -------
    bytes
    """
    in_domain = lookup_area(country_code_to)
    out_domain = lookup_area(country_code_from)
    query_params = {
        'documentType': "A78",
        'in_Domain': in_domain.code,
        'out_Domain': out_domain.code,
        'offset': offset,
    }
    if docstatus:
        query_params['docStatus'] = docstatus
    # Both update bounds must be given for the period-update filter to apply.
    if periodstartupdate and periodendupdate:
        query_params['periodStartUpdate'] = self._datetime_to_str(
            periodstartupdate)
        query_params['periodEndUpdate'] = self._datetime_to_str(
            periodendupdate)
    resp = self._base_request(params=query_params, start=start, end=end)
    return resp.content
def query_withdrawn_unavailability_of_generation_units(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp) -> bytes:
    """
    Withdrawn unavailability of generation units (A80 with docStatus A13).

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    bytes
    """
    # docstatus 'A13' selects withdrawn documents.
    return self._query_unavailability(
        country_code=country_code, start=start, end=end,
        doctype="A80", docstatus='A13')
class EntsoePandasClient(EntsoeRawClient):
@year_limited
def query_net_position_dayahead(self, country_code: Union[Area, str],
                                start: pd.Timestamp, end: pd.Timestamp) -> pd.Series:
    """
    Day-ahead net positions, localized to the area timezone and clipped
    to [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    pd.Series
    """
    area = lookup_area(country_code)
    raw = super().query_net_position_dayahead(
        country_code=area, start=start, end=end)
    positions = parse_netpositions(raw)
    positions = positions.tz_convert(area.tz)
    return positions.truncate(before=start, after=end)
@year_limited
def query_day_ahead_prices(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp) -> pd.Series:
    """
    Day-ahead prices, localized to the area timezone and clipped to
    [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    pd.Series
    """
    area = lookup_area(country_code)
    raw = super().query_day_ahead_prices(
        country_code=area, start=start, end=end)
    prices = parse_prices(raw)
    prices = prices.tz_convert(area.tz)
    return prices.truncate(before=start, after=end)
@year_limited
def query_load(self, country_code: Union[Area, str], start: pd.Timestamp,
               end: pd.Timestamp) -> pd.DataFrame:
    """
    Actual total load (process type A16), localized to the area timezone
    and clipped to [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    raw = super().query_load(country_code=area, start=start, end=end)
    # A16 = realised load
    loads = parse_loads(raw, process_type='A16')
    loads = loads.tz_convert(area.tz)
    return loads.truncate(before=start, after=end)
@year_limited
def query_load_forecast(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, process_type: str = 'A01') -> pd.DataFrame:
    """
    Load forecast, localized to the area timezone and clipped to
    [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    process_type : str

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    raw = super().query_load_forecast(
        country_code=area, start=start, end=end, process_type=process_type)
    forecast = parse_loads(raw, process_type=process_type)
    forecast = forecast.tz_convert(area.tz)
    return forecast.truncate(before=start, after=end)
def query_load_and_forecast(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp) -> pd.DataFrame:
    """
    Utility function to combine realised load with the day-ahead load
    forecast; mimics the HTML view "Total Load - Day Ahead / Actual".

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    pd.DataFrame
    """
    forecast = self.query_load_forecast(country_code, start=start, end=end)
    actual = self.query_load(country_code, start=start, end=end)
    # inner join: keep only timestamps present in both datasets
    return forecast.join(actual, sort=True, how='inner')
@year_limited
def query_generation_forecast(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, process_type: str = 'A01',
        nett: bool = False) -> Union[pd.DataFrame, pd.Series]:
    """
    Generation forecast, localized to the area timezone and clipped to
    [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    process_type : str
    nett : bool
        condense generation and consumption into a nett number

    Returns
    -------
    pd.DataFrame | pd.Series
    """
    area = lookup_area(country_code)
    raw = super().query_generation_forecast(
        country_code=area, start=start, end=end, process_type=process_type)
    forecast = parse_generation(raw, nett=nett)
    forecast = forecast.tz_convert(area.tz)
    return forecast.truncate(before=start, after=end)
@year_limited
def query_wind_and_solar_forecast(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, psr_type: Optional[str] = None,
        process_type: str = 'A01', **kwargs) -> pd.DataFrame:
    """
    Wind and solar forecast, localized to the area timezone and clipped
    to [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    psr_type : str
        filter on a single psr type
    process_type : str

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    raw = super().query_wind_and_solar_forecast(
        country_code=area, start=start, end=end, psr_type=psr_type,
        process_type=process_type)
    # nett=True: forecasts carry no separate consumption column
    forecast = parse_generation(raw, nett=True)
    forecast = forecast.tz_convert(area.tz)
    return forecast.truncate(before=start, after=end)
@year_limited
def query_generation(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, psr_type: Optional[str] = None,
        nett: bool = False, **kwargs) -> pd.DataFrame:
    """
    Actual generation per production type, localized to the area timezone
    and clipped to [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    psr_type : str
        filter on a single psr type
    nett : bool
        condense generation and consumption into a nett number

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    raw = super().query_generation(
        country_code=area, start=start, end=end, psr_type=psr_type)
    generation = parse_generation(raw, nett=nett)
    generation = generation.tz_convert(area.tz)
    return generation.truncate(before=start, after=end)
@year_limited
def query_installed_generation_capacity(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, psr_type: Optional[str] = None) -> pd.DataFrame:
    """
    Installed generation capacity, localized to the area timezone.

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    psr_type : str
        filter query for a specific psr type

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    raw = super().query_installed_generation_capacity(
        country_code=area, start=start, end=end, psr_type=psr_type)
    capacity = parse_generation(raw)
    capacity = capacity.tz_convert(area.tz)
    # Truncate to YearBegin and YearEnd, because the answer is always
    # year-based.
    return capacity.truncate(before=start - YearBegin(),
                             after=end + YearEnd())
@year_limited
def query_installed_generation_capacity_per_unit(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, psr_type: Optional[str] = None) -> pd.DataFrame:
    """
    Installed generation capacity per production unit.

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    psr_type : str
        filter query for a specific psr type

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    raw = super().query_installed_generation_capacity_per_unit(
        country_code=area, start=start, end=end, psr_type=psr_type)
    # Per-plant table is not a time series, so no tz conversion/truncation.
    return parse_installed_capacity_per_plant(raw)
@year_limited
def query_crossborder_flows(
        self, country_code_from: Union[Area, str],
        country_code_to: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, **kwargs) -> pd.Series:
    """
    Physical cross-border flows.

    Note: Result will be in the timezone of the origin country.

    Parameters
    ----------
    country_code_from : Area|str
    country_code_to : Area|str
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    pd.Series
    """
    origin = lookup_area(country_code_from)
    destination = lookup_area(country_code_to)
    raw = super().query_crossborder_flows(
        country_code_from=origin,
        country_code_to=destination,
        start=start,
        end=end)
    flows = parse_crossborder_flows(raw)
    flows = flows.tz_convert(origin.tz)
    return flows.truncate(before=start, after=end)
@year_limited
def query_scheduled_exchanges(
        self, country_code_from: Union[Area, str],
        country_code_to: Union[Area, str],
        start: pd.Timestamp,
        end: pd.Timestamp,
        dayahead: bool = False,
        **kwargs) -> pd.Series:
    """
    Scheduled commercial exchanges.

    Note: Result will be in the timezone of the origin country.

    Parameters
    ----------
    country_code_from : Area|str
    country_code_to : Area|str
    dayahead : bool
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    pd.Series
    """
    origin = lookup_area(country_code_from)
    destination = lookup_area(country_code_to)
    raw = super().query_scheduled_exchanges(
        country_code_from=origin,
        country_code_to=destination,
        dayahead=dayahead,
        start=start,
        end=end)
    exchanges = parse_crossborder_flows(raw)
    exchanges = exchanges.tz_convert(origin.tz)
    return exchanges.truncate(before=start, after=end)
@year_limited
def query_net_transfer_capacity_dayahead(
        self, country_code_from: Union[Area, str],
        country_code_to: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, **kwargs) -> pd.Series:
    """
    Day-ahead net transfer capacity.

    Note: Result will be in the timezone of the origin country.

    Parameters
    ----------
    country_code_from : Area|str
    country_code_to : Area|str
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    pd.Series
    """
    origin = lookup_area(country_code_from)
    destination = lookup_area(country_code_to)
    raw = super().query_net_transfer_capacity_dayahead(
        country_code_from=origin,
        country_code_to=destination,
        start=start,
        end=end)
    capacity = parse_crossborder_flows(raw)
    capacity = capacity.tz_convert(origin.tz)
    return capacity.truncate(before=start, after=end)
@year_limited
def query_net_transfer_capacity_weekahead(
        self, country_code_from: Union[Area, str],
        country_code_to: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, **kwargs) -> pd.Series:
    """
    Week-ahead net transfer capacity.

    Note: Result will be in the timezone of the origin country.

    Parameters
    ----------
    country_code_from : Area|str
    country_code_to : Area|str
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    pd.Series
    """
    origin = lookup_area(country_code_from)
    destination = lookup_area(country_code_to)
    raw = super().query_net_transfer_capacity_weekahead(
        country_code_from=origin,
        country_code_to=destination,
        start=start,
        end=end)
    capacity = parse_crossborder_flows(raw)
    capacity = capacity.tz_convert(origin.tz)
    return capacity.truncate(before=start, after=end)
@year_limited
def query_net_transfer_capacity_monthahead(
        self, country_code_from: Union[Area, str],
        country_code_to: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, **kwargs) -> pd.Series:
    """
    Month-ahead net transfer capacity.

    Note: Result will be in the timezone of the origin country.

    Parameters
    ----------
    country_code_from : Area|str
    country_code_to : Area|str
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    pd.Series
    """
    origin = lookup_area(country_code_from)
    destination = lookup_area(country_code_to)
    raw = super().query_net_transfer_capacity_monthahead(
        country_code_from=origin,
        country_code_to=destination,
        start=start,
        end=end)
    capacity = parse_crossborder_flows(raw)
    capacity = capacity.tz_convert(origin.tz)
    return capacity.truncate(before=start, after=end)
@year_limited
def query_net_transfer_capacity_yearahead(
        self, country_code_from: Union[Area, str],
        country_code_to: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, **kwargs) -> pd.Series:
    """
    Year-ahead net transfer capacity.

    Note: Result will be in the timezone of the origin country.

    Parameters
    ----------
    country_code_from : Area|str
    country_code_to : Area|str
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    pd.Series
    """
    origin = lookup_area(country_code_from)
    destination = lookup_area(country_code_to)
    raw = super().query_net_transfer_capacity_yearahead(
        country_code_from=origin,
        country_code_to=destination,
        start=start,
        end=end)
    capacity = parse_crossborder_flows(raw)
    capacity = capacity.tz_convert(origin.tz)
    return capacity.truncate(before=start, after=end)
@year_limited
def query_intraday_offered_capacity(
        self, country_code_from: Union[Area, str],
        country_code_to: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, implicit: bool = True, **kwargs) -> pd.Series:
    """
    Intraday offered capacity.

    Note: Result will be in the timezone of the origin country --> to check

    Parameters
    ----------
    country_code_from : Area|str
    country_code_to : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    implicit : bool
        True = implicit (default for most borders);
        False = explicit (for instance BE-GB)

    Returns
    -------
    pd.Series
    """
    origin = lookup_area(country_code_from)
    destination = lookup_area(country_code_to)
    raw = super().query_intraday_offered_capacity(
        country_code_from=origin,
        country_code_to=destination,
        start=start,
        end=end,
        implicit=implicit)
    capacity = parse_crossborder_flows(raw)
    capacity = capacity.tz_convert(origin.tz)
    return capacity.truncate(before=start, after=end)
@year_limited
def query_imbalance_prices(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, psr_type: Optional[str] = None) -> pd.DataFrame:
    """
    Imbalance prices, localized to the area timezone and clipped to
    [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    psr_type : str
        filter query for a specific psr type

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    # The raw endpoint returns a ZIP archive, not plain XML.
    zipped = super().query_imbalance_prices(
        country_code=area, start=start, end=end, psr_type=psr_type)
    prices = parse_imbalance_prices_zip(zip_contents=zipped)
    prices = prices.tz_convert(area.tz)
    return prices.truncate(before=start, after=end)
@year_limited
def query_imbalance_volumes(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, psr_type: Optional[str] = None) -> pd.DataFrame:
    """
    Imbalance volumes, localized to the area timezone and clipped to
    [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    psr_type : str
        filter query for a specific psr type

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    # The raw endpoint returns a ZIP archive, not plain XML.
    zipped = super().query_imbalance_volumes(
        country_code=area, start=start, end=end, psr_type=psr_type)
    volumes = parse_imbalance_volumes_zip(zip_contents=zipped)
    volumes = volumes.tz_convert(area.tz)
    return volumes.truncate(before=start, after=end)
@year_limited
@paginated
def query_procured_balancing_capacity(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, process_type: str,
        type_marketagreement_type: Optional[str] = None) -> pd.DataFrame:
    """
    Procured balancing capacity, parsed into a DataFrame localized to the
    area timezone and clipped to [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    process_type : str
        A51 ... aFRR; A47 ... mFRR
    type_marketagreement_type : str
        type of contract (see mappings.MARKETAGREEMENTTYPE)
    Returns
    -------
    pd.DataFrame
    """
    # NOTE: return annotation corrected from `bytes`; this wrapper parses
    # the raw response into a DataFrame.
    area = lookup_area(country_code)
    text = super(EntsoePandasClient, self).query_procured_balancing_capacity(
        country_code=area, start=start, end=end,
        process_type=process_type, type_marketagreement_type=type_marketagreement_type)
    df = parse_procured_balancing_capacity(text, area.tz)
    df = df.tz_convert(area.tz)
    df = df.truncate(before=start, after=end)
    return df
@year_limited
def query_activated_balancing_energy(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, business_type: str,
        psr_type: Optional[str] = None) -> pd.DataFrame:
    """
    Activated Balancing Energy [17.1.E], localized to the area timezone
    and clipped to [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    business_type : str
        type of contract (see mappings.BSNTYPE)
    psr_type : str
        filter query for a specific psr type

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    raw = super().query_activated_balancing_energy(
        country_code=area, start=start, end=end,
        business_type=business_type, psr_type=psr_type)
    energy = parse_contracted_reserve(raw, area.tz, "quantity")
    energy = energy.tz_convert(area.tz)
    return energy.truncate(before=start, after=end)
@year_limited
@paginated
@documents_limited(100)
def query_contracted_reserve_prices(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, type_marketagreement_type: str,
        psr_type: Optional[str] = None,
        offset: int = 0) -> pd.DataFrame:
    """
    Contracted reserve prices, localized to the area timezone and clipped
    to [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    type_marketagreement_type : str
        type of contract (see mappings.MARKETAGREEMENTTYPE)
    psr_type : str
        filter query for a specific psr type
    offset : int
        offset for querying more than 100 documents

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    raw = super().query_contracted_reserve_prices(
        country_code=area, start=start, end=end,
        type_marketagreement_type=type_marketagreement_type,
        psr_type=psr_type, offset=offset)
    prices = parse_contracted_reserve(raw, area.tz,
                                      "procurement_price.amount")
    prices = prices.tz_convert(area.tz)
    return prices.truncate(before=start, after=end)
@year_limited
@paginated
@documents_limited(100)
def query_contracted_reserve_amount(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, type_marketagreement_type: str,
        psr_type: Optional[str] = None,
        offset: int = 0) -> pd.DataFrame:
    """
    Contracted reserve amounts, localized to the area timezone and
    clipped to [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    type_marketagreement_type : str
        type of contract (see mappings.MARKETAGREEMENTTYPE)
    psr_type : str
        filter query for a specific psr type
    offset : int
        offset for querying more than 100 documents

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    raw = super().query_contracted_reserve_amount(
        country_code=area, start=start, end=end,
        type_marketagreement_type=type_marketagreement_type,
        psr_type=psr_type, offset=offset)
    amounts = parse_contracted_reserve(raw, area.tz, "quantity")
    amounts = amounts.tz_convert(area.tz)
    return amounts.truncate(before=start, after=end)
@year_limited
@paginated
@documents_limited(200)
def _query_unavailability(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, doctype: str, docstatus: Optional[str] = None,
        periodstartupdate: Optional[pd.Timestamp] = None,
        periodendupdate: Optional[pd.Timestamp] = None,
        offset: int = 0) -> pd.DataFrame:
    """
    Generic unavailability query, parsed into a DataFrame restricted to
    outages overlapping [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    doctype : str
    docstatus : str, optional
    periodstartupdate : pd.Timestamp, optional
    periodendupdate : pd.Timestamp, optional
    offset : int

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    content = super(EntsoePandasClient, self)._query_unavailability(
        country_code=area, start=start, end=end, doctype=doctype,
        docstatus=docstatus, periodstartupdate=periodstartupdate,
        periodendupdate=periodendupdate, offset=offset)
    df = parse_unavailabilities(content, doctype)
    df = df.tz_convert(area.tz)
    df['start'] = df['start'].apply(lambda x: x.tz_convert(area.tz))
    df['end'] = df['end'].apply(lambda x: x.tz_convert(area.tz))
    # BUGFIX: keep only outages that overlap the query window. The previous
    # `|` made the filter a no-op, since every valid interval satisfies
    # (start < end) or (end > start).
    df = df[(df['start'] < end) & (df['end'] > start)]
    return df
def query_unavailability_of_generation_units(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, docstatus: Optional[str] = None,
        periodstartupdate: Optional[pd.Timestamp] = None,
        periodendupdate: Optional[pd.Timestamp] = None) -> pd.DataFrame:
    """
    Unavailability of generation units (document type A80) as a DataFrame.

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    docstatus : str, optional
    periodstartupdate : pd.Timestamp, optional
    periodendupdate : pd.Timestamp, optional

    Returns
    -------
    pd.DataFrame
    """
    return self._query_unavailability(
        country_code=country_code, start=start, end=end, doctype="A80",
        docstatus=docstatus, periodstartupdate=periodstartupdate,
        periodendupdate=periodendupdate)
def query_unavailability_of_production_units(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, docstatus: Optional[str] = None,
        periodstartupdate: Optional[pd.Timestamp] = None,
        periodendupdate: Optional[pd.Timestamp] = None) -> pd.DataFrame:
    """
    Unavailability of production units (document type A77) as a DataFrame.

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    docstatus : str, optional
    periodstartupdate : pd.Timestamp, optional
    periodendupdate : pd.Timestamp, optional

    Returns
    -------
    pd.DataFrame
    """
    return self._query_unavailability(
        country_code=country_code, start=start, end=end, doctype="A77",
        docstatus=docstatus, periodstartupdate=periodstartupdate,
        periodendupdate=periodendupdate)
@paginated
def query_unavailability_transmission(
        self, country_code_from: Union[Area, str],
        country_code_to: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, docstatus: Optional[str] = None,
        periodstartupdate: Optional[pd.Timestamp] = None,
        periodendupdate: Optional[pd.Timestamp] = None,
        offset: int = 0,
        **kwargs) -> pd.DataFrame:
    """
    Unavailability of transmission infrastructure (document type A78),
    restricted to outages overlapping [start, end].

    Parameters
    ----------
    country_code_from : Area|str
    country_code_to : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    docstatus : str, optional
    periodstartupdate : pd.Timestamp, optional
    periodendupdate : pd.Timestamp, optional
    offset : int

    Returns
    -------
    pd.DataFrame
    """
    area_to = lookup_area(country_code_to)
    area_from = lookup_area(country_code_from)
    content = super(EntsoePandasClient,
                    self).query_unavailability_transmission(
        area_from, area_to, start, end, docstatus, periodstartupdate,
        periodendupdate, offset=offset)
    df = parse_unavailabilities(content, "A78")
    df = df.tz_convert(area_from.tz)
    df['start'] = df['start'].apply(lambda x: x.tz_convert(area_from.tz))
    df['end'] = df['end'].apply(lambda x: x.tz_convert(area_from.tz))
    # BUGFIX: overlap requires both conditions; with `|` the filter kept
    # every valid interval.
    df = df[(df['start'] < end) & (df['end'] > start)]
    return df
def query_withdrawn_unavailability_of_generation_units(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp) -> pd.DataFrame:
    """
    Withdrawn unavailability of generation units (A80, docStatus A13),
    restricted to outages overlapping [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    pd.DataFrame
    """
    df = self.query_unavailability_of_generation_units(
        country_code=country_code, start=start, end=end, docstatus='A13')
    # BUGFIX: overlap filter must use `&`; with `|` every valid interval
    # passed the filter.
    df = df[(df['start'] < end) & (df['end'] > start)]
    return df
@day_limited
def query_generation_per_plant(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp, psr_type: Optional[str] = None,
        include_eic: bool = False,
        nett: bool = False, **kwargs) -> pd.DataFrame:
    """
    Actual generation per plant, localized to the area timezone and
    clipped to [start, end].

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp
    psr_type : str
        filter on a single psr type
    nett : bool
        condense generation and consumption into a nett number
        NOTE(review): not forwarded to the parser here — confirm intent
    include_eic : bool
        if True also include the eic code in the output

    Returns
    -------
    pd.DataFrame
    """
    area = lookup_area(country_code)
    text = super(EntsoePandasClient, self).query_generation_per_plant(
        country_code=area, start=start, end=end, psr_type=psr_type)
    df = parse_generation(text, per_plant=True, include_eic=include_eic)
    # Re-decode plant names: the payload carries UTF-8 bytes mislabelled
    # as latin-1.
    df.columns = df.columns.set_levels(df.columns.levels[0].str.encode('latin-1').str.decode('utf-8'), level=0)
    df = df.tz_convert(area.tz)
    # Truncation will fail if data is not sorted along the index in rare
    # cases. Ensure the dataframe is sorted.
    # BUGFIX: pass `axis` by keyword — positional `axis` for sort_index
    # was deprecated in pandas 1.1 and removed in pandas 2.0.
    df = df.sort_index(axis=0)
    df = df.truncate(before=start, after=end)
    return df
def query_import(self, country_code: Union[Area, str], start: pd.Timestamp,
                 end: pd.Timestamp) -> pd.DataFrame:
    """
    Adds together all incoming cross-border flows to a country.

    The neighbours of a country are given by the NEIGHBOURS mapping.
    """
    area = lookup_area(country_code)
    inflows = []
    for neighbour in NEIGHBOURS[area.name]:
        try:
            flow = self.query_crossborder_flows(
                country_code_from=neighbour,
                country_code_to=country_code,
                start=start,
                end=end,
                lookup_bzones=True)
        except NoMatchingDataError:
            # Some neighbour borders have no data for the window; skip them.
            continue
        flow.name = neighbour
        inflows.append(flow)
    df = pd.concat(inflows, axis=1)
    # Drop columns that contain only zeros.
    df = df.loc[:, (df != 0).any(axis=0)]
    df = df.tz_convert(area.tz)
    return df.truncate(before=start, after=end)
def query_generation_import(
        self, country_code: Union[Area, str], start: pd.Timestamp,
        end: pd.Timestamp) -> pd.DataFrame:
    """Query the combination of both domestic generation and imports."""
    generation = self.query_generation(country_code=country_code,
                                       start=start, end=end,
                                       lookup_bzones=True)
    # Drop columns that contain only zeros, then align to hourly sums.
    generation = generation.loc[:, (generation != 0).any(axis=0)]
    generation = generation.resample('H').sum()
    imports = self.query_import(country_code=country_code, start=start,
                                end=end)
    combined = pd.concat([generation, imports], axis=1,
                         keys=['Generation', 'Import'])
    return combined.truncate(before=start, after=end)
| 33.911026
| 120
| 0.572861
| 6,607
| 62,125
| 5.189345
| 0.070229
| 0.092078
| 0.055533
| 0.065945
| 0.844718
| 0.831126
| 0.814677
| 0.80231
| 0.792685
| 0.779619
| 0
| 0.005288
| 0.327243
| 62,125
| 1,831
| 121
| 33.929547
| 0.81505
| 0.221553
| 0
| 0.714448
| 0
| 0
| 0.048431
| 0.004724
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071672
| false
| 0
| 0.021615
| 0
| 0.166098
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ad500028db882c012d2e87030d5c74d7434b458
| 20,272
|
py
|
Python
|
plugins/minfraud/komand_minfraud/actions/device_lookup/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/minfraud/komand_minfraud/actions/device_lookup/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/minfraud/komand_minfraud/actions/device_lookup/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
import komand
import json
class Component:
    """Metadata for the Device Lookup action (generated by the Komand SDK)."""
    # Shown as the action's description in the InsightConnect UI.
    DESCRIPTION = "Query device info"
class Input:
    """Keys of the action's input parameters (see DeviceLookupInput.schema)."""
    ACCEPT_LANGUAGE = "accept_language"
    ADDRESS = "address"
    USER_AGENT = "user_agent"
class Output:
    """Keys of the action's output fields (see the output schema)."""
    DEVICE_RESULT = "device_result"
    IP_RESULT = "ip_result"
    RISK_SCORE = "risk_score"
class DeviceLookupInput(komand.Input):
    """Input contract for the Device Lookup action.

    The JSON Schema below is generated by the Komand SDK; `address` is the
    only required field. Do not edit the schema string by hand.
    """
    schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"accept_language": {
"type": "string",
"title": "Accept-Language",
"description": "HTTP Accept-Language header",
"order": 3
},
"address": {
"type": "string",
"title": "IP Address",
"description": "IP address to query",
"order": 1
},
"user_agent": {
"type": "string",
"title": "User-Agent",
"description": "HTTP User-Agent header",
"order": 2
}
},
"required": [
"address"
]
}
""")
    def __init__(self):
        # The SDK base class validates incoming parameters against `schema`.
        super(self.__class__, self).__init__(self.schema)
class DeviceLookupOutput(komand.Output):
    """Output schema wrapper for the Device Lookup action.

    Generated by the Komand SDK -- do not hand-edit the schema text.
    The "ip" definition carries a nested "definitions" block that duplicates
    the top-level ones; the "$ref" pointers all resolve against the root.
    NOTE(review): "accuracy_raidus" is how the generated field is spelled --
    do not "fix" it here, the key must match what the plugin emits.
    """

    # JSON Schema describing the action's output variables.
    schema = json.loads("""
   {
  "type": "object",
  "title": "Variables",
  "properties": {
    "device_result": {
      "$ref": "#/definitions/device",
      "title": "Device Result",
      "description": "Results for device",
      "order": 1
    },
    "ip_result": {
      "$ref": "#/definitions/ip",
      "title": "Ip Result",
      "description": "Results for IP",
      "order": 2
    },
    "risk_score": {
      "type": "string",
      "title": "Risk Score",
      "description": "Overall risk score",
      "order": 3
    }
  },
  "definitions": {
    "city": {
      "type": "object",
      "title": "city",
      "properties": {
        "confidence": {
          "type": "integer",
          "title": "Confidence",
          "description": "City confidence",
          "order": 1
        },
        "geoname_id": {
          "type": "integer",
          "title": "Geoname Id",
          "description": "City geoname ID",
          "order": 2
        },
        "name": {
          "type": "string",
          "title": "Name",
          "description": "City name",
          "order": 3
        }
      }
    },
    "continent": {
      "type": "object",
      "title": "continent",
      "properties": {
        "code": {
          "type": "string",
          "title": "Code",
          "description": "Continent code",
          "order": 1
        },
        "geoname_id": {
          "type": "integer",
          "title": "Geoname Id",
          "description": "Continent geoname ID",
          "order": 2
        },
        "name": {
          "type": "string",
          "title": "Name",
          "description": "Continent name",
          "order": 3
        }
      }
    },
    "country": {
      "type": "object",
      "title": "country",
      "properties": {
        "confidence": {
          "type": "integer",
          "title": "Confidence",
          "description": "Country confidence",
          "order": 1
        },
        "geoname_id": {
          "type": "integer",
          "title": "Geoname Id",
          "description": "Country geoname ID",
          "order": 2
        },
        "is_high_risk": {
          "type": "boolean",
          "title": "Is High Risk",
          "description": "Is country high risk",
          "order": 3
        },
        "iso_code": {
          "type": "string",
          "title": "Iso Code",
          "description": "Country ISO code",
          "order": 4
        },
        "name": {
          "type": "string",
          "title": "Name",
          "description": "country name",
          "order": 5
        }
      }
    },
    "device": {
      "type": "object",
      "title": "device",
      "properties": {
        "confidence": {
          "type": "integer",
          "title": "Confidence",
          "description": "Device confidence",
          "order": 1
        },
        "id": {
          "type": "string",
          "title": "Id",
          "description": "Device ID",
          "order": 2
        },
        "last_seen": {
          "type": "string",
          "title": "Last Seen",
          "description": "Device last seen",
          "order": 3
        }
      }
    },
    "ip": {
      "type": "object",
      "title": "ip",
      "properties": {
        "city": {
          "$ref": "#/definitions/city",
          "title": "City",
          "description": "City associated with IP",
          "order": 2
        },
        "continent": {
          "$ref": "#/definitions/continent",
          "title": "Continent",
          "description": "Continent associated with IP",
          "order": 3
        },
        "country": {
          "$ref": "#/definitions/country",
          "title": "Country",
          "description": "Country associated with IP",
          "order": 4
        },
        "location": {
          "$ref": "#/definitions/location",
          "title": "Location",
          "description": "Location associated with IP",
          "order": 5
        },
        "postal": {
          "$ref": "#/definitions/postal",
          "title": "Postal",
          "description": "Postal associated with IP",
          "order": 6
        },
        "registered_country": {
          "$ref": "#/definitions/registered_country",
          "title": "Registered Country",
          "description": "Country where IP is registered",
          "order": 7
        },
        "represented_country": {
          "$ref": "#/definitions/represented_country",
          "title": "Represented Country",
          "description": "Country where IP is represented",
          "order": 8
        },
        "risk": {
          "type": "string",
          "title": "Risk",
          "description": "IP risk score",
          "order": 1
        },
        "subdivisions": {
          "$ref": "#/definitions/subdivisions",
          "title": "Subdivisions",
          "description": "Subdivisions data",
          "order": 9
        },
        "traits": {
          "$ref": "#/definitions/traits",
          "title": "Traits",
          "description": "Traits data",
          "order": 10
        }
      },
      "definitions": {
        "city": {
          "type": "object",
          "title": "city",
          "properties": {
            "confidence": {
              "type": "integer",
              "title": "Confidence",
              "description": "City confidence",
              "order": 1
            },
            "geoname_id": {
              "type": "integer",
              "title": "Geoname Id",
              "description": "City geoname ID",
              "order": 2
            },
            "name": {
              "type": "string",
              "title": "Name",
              "description": "City name",
              "order": 3
            }
          }
        },
        "continent": {
          "type": "object",
          "title": "continent",
          "properties": {
            "code": {
              "type": "string",
              "title": "Code",
              "description": "Continent code",
              "order": 1
            },
            "geoname_id": {
              "type": "integer",
              "title": "Geoname Id",
              "description": "Continent geoname ID",
              "order": 2
            },
            "name": {
              "type": "string",
              "title": "Name",
              "description": "Continent name",
              "order": 3
            }
          }
        },
        "country": {
          "type": "object",
          "title": "country",
          "properties": {
            "confidence": {
              "type": "integer",
              "title": "Confidence",
              "description": "Country confidence",
              "order": 1
            },
            "geoname_id": {
              "type": "integer",
              "title": "Geoname Id",
              "description": "Country geoname ID",
              "order": 2
            },
            "is_high_risk": {
              "type": "boolean",
              "title": "Is High Risk",
              "description": "Is country high risk",
              "order": 3
            },
            "iso_code": {
              "type": "string",
              "title": "Iso Code",
              "description": "Country ISO code",
              "order": 4
            },
            "name": {
              "type": "string",
              "title": "Name",
              "description": "country name",
              "order": 5
            }
          }
        },
        "location": {
          "type": "object",
          "title": "location",
          "properties": {
            "accuracy_raidus": {
              "type": "integer",
              "title": "Accuracy Raidus",
              "description": "Accuracy Radius",
              "order": 1
            },
            "latitude": {
              "type": "string",
              "title": "Latitude",
              "description": "Latitude of location",
              "order": 2
            },
            "local_time": {
              "type": "string",
              "title": "Local Time",
              "description": "Local time of location",
              "order": 3
            },
            "longitude": {
              "type": "string",
              "title": "Longitude",
              "description": "Longitude of location",
              "order": 4
            },
            "metro_code": {
              "type": "integer",
              "title": "Metro Code",
              "description": "Metro code",
              "order": 5
            },
            "time_zone": {
              "type": "string",
              "title": "Time Zone",
              "description": "Time zone of location",
              "order": 6
            }
          }
        },
        "postal": {
          "type": "object",
          "title": "postal",
          "properties": {
            "code": {
              "type": "integer",
              "title": "Code",
              "description": "Postal code",
              "order": 1
            },
            "confidence": {
              "type": "integer",
              "title": "Confidence",
              "description": "Postal confidence",
              "order": 2
            }
          }
        },
        "registered_country": {
          "type": "object",
          "title": "registered_country",
          "properties": {
            "geoname_id": {
              "type": "integer",
              "title": "Geoname Id",
              "description": "Registered country geoname ID",
              "order": 1
            },
            "iso_code": {
              "type": "string",
              "title": "Iso Code",
              "description": "Registered country ISO code",
              "order": 2
            },
            "name": {
              "type": "string",
              "title": "Name",
              "description": "Registered country name",
              "order": 3
            }
          }
        },
        "represented_country": {
          "type": "object",
          "title": "represented_country",
          "properties": {
            "_type": {
              "type": "string",
              "title": "Type",
              "description": "Represented country type",
              "order": 4
            },
            "geoname_id": {
              "type": "integer",
              "title": "Geoname Id",
              "description": "Represented country geoname ID",
              "order": 1
            },
            "iso_code": {
              "type": "string",
              "title": "Iso Code",
              "description": "Represented country ISO code",
              "order": 2
            },
            "name": {
              "type": "string",
              "title": "Name",
              "description": "Represented country name",
              "order": 3
            }
          }
        },
        "subdivisions": {
          "type": "object",
          "title": "subdivisions",
          "properties": {
            "confidence": {
              "type": "integer",
              "title": "Confidence",
              "description": "Subdivision confidence",
              "order": 1
            },
            "geoname_id": {
              "type": "integer",
              "title": "Geoname Id",
              "description": "Subdivision geoname ID",
              "order": 2
            },
            "iso_code": {
              "type": "string",
              "title": "Iso Code",
              "description": "Subdivision ISO code",
              "order": 3
            },
            "name": {
              "type": "string",
              "title": "Name",
              "description": "Subdivision name",
              "order": 4
            }
          }
        },
        "traits": {
          "type": "object",
          "title": "traits",
          "properties": {
            "autonomous_system_number": {
              "type": "integer",
              "title": "Autonomous System Number",
              "description": "Autonomous system number",
              "order": 1
            },
            "autonomous_system_organization": {
              "type": "string",
              "title": "Autonomous System Organization",
              "description": "Organization associated with ASN",
              "order": 2
            },
            "domain": {
              "type": "string",
              "title": "Domain",
              "description": "Domain",
              "order": 3
            },
            "ip_address": {
              "type": "string",
              "title": "Ip Address",
              "description": "IP address",
              "order": 7
            },
            "is_anonymous_proxy": {
              "type": "boolean",
              "title": "Is Anonymous Proxy",
              "description": "Is IP an anonymous proxy",
              "order": 4
            },
            "is_satellite_provider": {
              "type": "boolean",
              "title": "Is Satellite Provider",
              "description": "Is IP a satellite provider",
              "order": 5
            },
            "isp": {
              "type": "string",
              "title": "Isp",
              "description": "ISP associated with IP",
              "order": 6
            },
            "organization": {
              "type": "string",
              "title": "Organization",
              "description": "Organization associated with IP",
              "order": 8
            },
            "user_type": {
              "type": "string",
              "title": "User Type",
              "description": "User Type",
              "order": 9
            }
          }
        }
      }
    },
    "location": {
      "type": "object",
      "title": "location",
      "properties": {
        "accuracy_raidus": {
          "type": "integer",
          "title": "Accuracy Raidus",
          "description": "Accuracy Radius",
          "order": 1
        },
        "latitude": {
          "type": "string",
          "title": "Latitude",
          "description": "Latitude of location",
          "order": 2
        },
        "local_time": {
          "type": "string",
          "title": "Local Time",
          "description": "Local time of location",
          "order": 3
        },
        "longitude": {
          "type": "string",
          "title": "Longitude",
          "description": "Longitude of location",
          "order": 4
        },
        "metro_code": {
          "type": "integer",
          "title": "Metro Code",
          "description": "Metro code",
          "order": 5
        },
        "time_zone": {
          "type": "string",
          "title": "Time Zone",
          "description": "Time zone of location",
          "order": 6
        }
      }
    },
    "postal": {
      "type": "object",
      "title": "postal",
      "properties": {
        "code": {
          "type": "integer",
          "title": "Code",
          "description": "Postal code",
          "order": 1
        },
        "confidence": {
          "type": "integer",
          "title": "Confidence",
          "description": "Postal confidence",
          "order": 2
        }
      }
    },
    "registered_country": {
      "type": "object",
      "title": "registered_country",
      "properties": {
        "geoname_id": {
          "type": "integer",
          "title": "Geoname Id",
          "description": "Registered country geoname ID",
          "order": 1
        },
        "iso_code": {
          "type": "string",
          "title": "Iso Code",
          "description": "Registered country ISO code",
          "order": 2
        },
        "name": {
          "type": "string",
          "title": "Name",
          "description": "Registered country name",
          "order": 3
        }
      }
    },
    "represented_country": {
      "type": "object",
      "title": "represented_country",
      "properties": {
        "_type": {
          "type": "string",
          "title": "Type",
          "description": "Represented country type",
          "order": 4
        },
        "geoname_id": {
          "type": "integer",
          "title": "Geoname Id",
          "description": "Represented country geoname ID",
          "order": 1
        },
        "iso_code": {
          "type": "string",
          "title": "Iso Code",
          "description": "Represented country ISO code",
          "order": 2
        },
        "name": {
          "type": "string",
          "title": "Name",
          "description": "Represented country name",
          "order": 3
        }
      }
    },
    "subdivisions": {
      "type": "object",
      "title": "subdivisions",
      "properties": {
        "confidence": {
          "type": "integer",
          "title": "Confidence",
          "description": "Subdivision confidence",
          "order": 1
        },
        "geoname_id": {
          "type": "integer",
          "title": "Geoname Id",
          "description": "Subdivision geoname ID",
          "order": 2
        },
        "iso_code": {
          "type": "string",
          "title": "Iso Code",
          "description": "Subdivision ISO code",
          "order": 3
        },
        "name": {
          "type": "string",
          "title": "Name",
          "description": "Subdivision name",
          "order": 4
        }
      }
    },
    "traits": {
      "type": "object",
      "title": "traits",
      "properties": {
        "autonomous_system_number": {
          "type": "integer",
          "title": "Autonomous System Number",
          "description": "Autonomous system number",
          "order": 1
        },
        "autonomous_system_organization": {
          "type": "string",
          "title": "Autonomous System Organization",
          "description": "Organization associated with ASN",
          "order": 2
        },
        "domain": {
          "type": "string",
          "title": "Domain",
          "description": "Domain",
          "order": 3
        },
        "ip_address": {
          "type": "string",
          "title": "Ip Address",
          "description": "IP address",
          "order": 7
        },
        "is_anonymous_proxy": {
          "type": "boolean",
          "title": "Is Anonymous Proxy",
          "description": "Is IP an anonymous proxy",
          "order": 4
        },
        "is_satellite_provider": {
          "type": "boolean",
          "title": "Is Satellite Provider",
          "description": "Is IP a satellite provider",
          "order": 5
        },
        "isp": {
          "type": "string",
          "title": "Isp",
          "description": "ISP associated with IP",
          "order": 6
        },
        "organization": {
          "type": "string",
          "title": "Organization",
          "description": "Organization associated with IP",
          "order": 8
        },
        "user_type": {
          "type": "string",
          "title": "User Type",
          "description": "User Type",
          "order": 9
        }
      }
    }
  }
}
    """)

    def __init__(self):
        # Register the parsed schema with the komand.Output base class.
        super(self.__class__, self).__init__(self.schema)
| 26.779392
| 64
| 0.404943
| 1,424
| 20,272
| 5.690309
| 0.076545
| 0.06294
| 0.094409
| 0.029619
| 0.800444
| 0.797729
| 0.789337
| 0.782303
| 0.770209
| 0.763915
| 0
| 0.008562
| 0.435379
| 20,272
| 756
| 65
| 26.814815
| 0.699371
| 0.001825
| 0
| 0.69852
| 1
| 0
| 0.973113
| 0.034103
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002692
| false
| 0
| 0.002692
| 0
| 0.024226
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6ae38a3103a05f7f8603c9526ca6678ce7901459
| 8,668
|
py
|
Python
|
tests/visualization_tests/test_optimization_history.py
|
captain-pool/optuna
|
2ae8c17afea54362460320870304c763e91c0596
|
[
"MIT"
] | 1,300
|
2018-12-03T06:11:11.000Z
|
2019-11-15T01:28:25.000Z
|
tests/visualization_tests/test_optimization_history.py
|
captain-pool/optuna
|
2ae8c17afea54362460320870304c763e91c0596
|
[
"MIT"
] | 274
|
2018-12-04T09:54:07.000Z
|
2019-11-15T02:23:18.000Z
|
tests/visualization_tests/test_optimization_history.py
|
captain-pool/optuna
|
2ae8c17afea54362460320870304c763e91c0596
|
[
"MIT"
] | 148
|
2018-12-03T10:48:50.000Z
|
2019-11-11T16:37:51.000Z
|
import numpy as np
import pytest
from optuna.study import create_study
from optuna.trial import Trial
from optuna.visualization import plot_optimization_history
def test_target_is_none_and_study_is_multi_obj() -> None:
    """A multi-objective study without an explicit target must be rejected."""
    multi_objective_study = create_study(directions=["minimize", "minimize"])
    with pytest.raises(ValueError):
        plot_optimization_history(multi_objective_study)
@pytest.mark.parametrize("direction", ["minimize", "maximize"])
def test_plot_optimization_history(direction: str) -> None:
    """Check traces, legend, and target handling for a single study."""
    # A study with no trials yields an empty figure.
    study = create_study(direction=direction)
    assert len(plot_optimization_history(study).data) == 0

    trial_values = [1.0, 2.0, 0.0]

    def objective(trial: Trial) -> float:
        # Fixed value per trial number; anything past the third trial gets 0.0.
        return trial_values[trial.number] if trial.number < 3 else 0.0

    # After three finished trials the figure carries an objective-value trace
    # followed by a best-value trace.
    study = create_study(direction=direction)
    study.optimize(objective, n_trials=3)
    fig = plot_optimization_history(study)
    assert len(fig.data) == 2
    assert np.array_equal(fig.data[0].x, [0, 1, 2])
    assert np.array_equal(fig.data[0].y, [1.0, 2.0, 0.0])
    assert np.array_equal(fig.data[1].x, [0, 1, 2])
    expected_best = [1.0, 1.0, 0.0] if direction == "minimize" else [1.0, 2.0, 2.0]
    assert np.array_equal(fig.data[1].y, expected_best)
    assert [trace.name for trace in fig.data] == ["Objective Value", "Best Value"]
    assert fig.layout.yaxis.title.text == "Objective Value"

    # A custom target drops the best-value trace and emits a warning.
    with pytest.warns(UserWarning):
        fig = plot_optimization_history(study, target=lambda t: t.number)
    assert len(fig.data) == 1
    assert np.array_equal(fig.data[0].x, [0, 1, 2])
    assert np.array_equal(fig.data[0].y, [0.0, 1.0, 2.0])

    # A custom target name appears in both the legend and the y-axis title.
    custom_target_name = "Target Name"
    fig = plot_optimization_history(study, target_name=custom_target_name)
    assert [trace.name for trace in fig.data] == [custom_target_name, "Best Value"]
    assert fig.layout.yaxis.title.text == custom_target_name

    # Failed trials are excluded, leaving an empty figure.
    def fail_objective(_: Trial) -> float:
        raise ValueError

    study = create_study(direction=direction)
    study.optimize(fail_objective, n_trials=1, catch=(ValueError,))
    assert len(plot_optimization_history(study).data) == 0
@pytest.mark.parametrize("direction", ["minimize", "maximize"])
def test_plot_optimization_history_with_multiple_studies(direction: str) -> None:
    """Check traces and legend entries when several studies are plotted together."""
    n_studies = 10

    # All-empty studies produce an empty figure.
    studies = [create_study(direction=direction) for _ in range(n_studies)]
    assert len(plot_optimization_history(studies).data) == 0

    def objective(trial: Trial) -> float:
        # Fixed value per trial number; anything past the third trial gets 0.0.
        return {0: 1.0, 1: 2.0, 2: 0.0}.get(trial.number, 0.0)

    # Each study contributes an objective-value trace and a best-value trace.
    studies = [create_study(direction=direction) for _ in range(n_studies)]
    for study in studies:
        study.optimize(objective, n_trials=3)
    fig = plot_optimization_history(studies)
    assert len(fig.data) == 2 * n_studies
    assert np.array_equal(fig.data[0].x, [0, 1, 2])
    assert np.array_equal(fig.data[0].y, [1.0, 2.0, 0.0])
    assert np.array_equal(fig.data[1].x, [0, 1, 2])
    expected_best = [1.0, 1.0, 0.0] if direction == "minimize" else [1.0, 2.0, 2.0]
    assert np.array_equal(fig.data[1].y, expected_best)

    # Legend carries a best-value and an objective-value entry per study.
    expected_legend_texts = []
    for study in studies:
        expected_legend_texts.append(f"Best Value of {study.study_name}")
        expected_legend_texts.append(f"Objective Value of {study.study_name}")
    assert sorted(trace.name for trace in fig.data) == sorted(expected_legend_texts)
    assert fig.layout.yaxis.title.text == "Objective Value"

    # A custom target removes the best-value traces and emits a warning.
    with pytest.warns(UserWarning):
        fig = plot_optimization_history(studies, target=lambda t: t.number)
    assert len(fig.data) == 1 * n_studies
    assert np.array_equal(fig.data[0].x, [0, 1, 2])
    assert np.array_equal(fig.data[0].y, [0, 1, 2])

    # A custom target name appears in the legend and the y-axis title.
    custom_target_name = "Target Name"
    fig = plot_optimization_history(studies, target_name=custom_target_name)
    expected_legend_texts = []
    for study in studies:
        expected_legend_texts.append(f"Best Value of {study.study_name}")
        expected_legend_texts.append(f"{custom_target_name} of {study.study_name}")
    assert sorted(trace.name for trace in fig.data) == sorted(expected_legend_texts)
    assert fig.layout.yaxis.title.text == custom_target_name

    # Failed trials are excluded, leaving an empty figure.
    def fail_objective(_: Trial) -> float:
        raise ValueError

    studies = [create_study(direction=direction) for _ in range(n_studies)]
    for study in studies:
        study.optimize(fail_objective, n_trials=1, catch=(ValueError,))
    assert len(plot_optimization_history(studies).data) == 0
@pytest.mark.parametrize("direction", ["minimize", "maximize"])
def test_plot_optimization_history_with_error_bar(direction: str) -> None:
    """Check the aggregated (error-bar) plot over several studies."""
    n_studies = 10

    # All-empty studies produce an empty figure.
    studies = [create_study(direction=direction) for _ in range(n_studies)]
    assert len(plot_optimization_history(studies, error_bar=True).data) == 0

    def objective(trial: Trial) -> float:
        # Fixed value per trial number; anything past the third trial gets 0.0.
        return {0: 1.0, 1: 2.0, 2: 0.0}.get(trial.number, 0.0)

    # With identical trials in every study the aggregated figure has 4 traces.
    studies = [create_study(direction=direction) for _ in range(n_studies)]
    for study in studies:
        study.optimize(objective, n_trials=3)
    fig = plot_optimization_history(studies, error_bar=True)
    assert len(fig.data) == 4
    assert np.array_equal(fig.data[0].x, [0, 1, 2])
    assert np.array_equal(fig.data[0].y, [1.0, 2.0, 0.0])
    assert np.array_equal(fig.data[1].x, [0, 1, 2])
    expected_best = [1.0, 1.0, 0.0] if direction == "minimize" else [1.0, 2.0, 2.0]
    assert np.array_equal(fig.data[1].y, expected_best)

    # The error-bar scatters carry no `name`, so only two legend entries remain.
    named = [trace.name for trace in fig.data if trace.name is not None]
    assert sorted(named) == ["Best Value", "Objective Value"]
    assert fig.layout.yaxis.title.text == "Objective Value"

    # A custom target drops the best-value trace and emits a warning.
    with pytest.warns(UserWarning):
        fig = plot_optimization_history(studies, target=lambda t: t.number, error_bar=True)
    assert len(fig.data) == 1
    assert np.array_equal(fig.data[0].x, [0, 1, 2])
    assert np.array_equal(fig.data[0].y, [0, 1, 2])

    # A custom target name appears in the legend and the y-axis title.
    custom_target_name = "Target Name"
    fig = plot_optimization_history(studies, target_name=custom_target_name, error_bar=True)
    named = [trace.name for trace in fig.data if trace.name is not None]
    assert sorted(named) == ["Best Value", custom_target_name]
    assert fig.layout.yaxis.title.text == custom_target_name

    # Failed trials are excluded, leaving an empty figure.
    def fail_objective(_: Trial) -> float:
        raise ValueError

    studies = [create_study(direction=direction) for _ in range(n_studies)]
    for study in studies:
        study.optimize(fail_objective, n_trials=1, catch=(ValueError,))
    assert len(plot_optimization_history(studies, error_bar=True).data) == 0
@pytest.mark.parametrize("direction", ["minimize", "maximize"])
def test_error_bar_in_optimization_history(direction: str) -> None:
    """Error-bar traces must reflect the mean and std of the studies' values."""

    def objective(trial: Trial) -> float:
        return trial.suggest_float("x", 0, 1)

    suggested_params = [0.1, 0.3, 0.2]
    studies = [create_study(direction=direction) for _ in range(3)]
    # Pin each study's single trial to a known "x" via the trial queue.
    for x, study in zip(suggested_params, studies):
        study.enqueue_trial({"x": x})
        study.optimize(objective, n_trials=1)

    fig = plot_optimization_history(studies, error_bar=True)
    mean = np.mean(suggested_params)
    std = np.std(suggested_params)
    # Trace 0 carries the mean; traces 2 and 3 carry mean +/- std.
    np.testing.assert_almost_equal(fig.data[0].y, mean)
    np.testing.assert_almost_equal(fig.data[2].y, mean + std)
    np.testing.assert_almost_equal(fig.data[3].y, mean - std)
| 38.017544
| 95
| 0.680434
| 1,234
| 8,668
| 4.610211
| 0.08671
| 0.068553
| 0.084901
| 0.066444
| 0.891545
| 0.868694
| 0.857268
| 0.824222
| 0.805238
| 0.79768
| 0
| 0.025759
| 0.198316
| 8,668
| 227
| 96
| 38.185022
| 0.79292
| 0.043955
| 0
| 0.758824
| 0
| 0
| 0.054776
| 0.011125
| 0
| 0
| 0
| 0
| 0.282353
| 1
| 0.070588
| false
| 0
| 0.029412
| 0.005882
| 0.176471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ac4e48cf16a5788526fd72ae3eff0bd6d43a0a2
| 16,224
|
py
|
Python
|
Tests/test_integration_manage_faculty.py
|
avirois/ProjectManagement
|
9e05d1302336ca4ef32bef93e28bcc69fdad7d4f
|
[
"WTFPL"
] | null | null | null |
Tests/test_integration_manage_faculty.py
|
avirois/ProjectManagement
|
9e05d1302336ca4ef32bef93e28bcc69fdad7d4f
|
[
"WTFPL"
] | 3
|
2021-03-24T11:53:18.000Z
|
2021-04-05T10:20:58.000Z
|
Tests/test_integration_manage_faculty.py
|
avirois/ProjectManagement
|
9e05d1302336ca4ef32bef93e28bcc69fdad7d4f
|
[
"WTFPL"
] | null | null | null |
import pytest
from selenium import webdriver
from time import sleep
from selenium.webdriver.support.select import Select
import sqlite3
from static.classes.User import User, encryptPassword, decryptPassword
# Global test fixtures/constants.
# Credentials and names for the user, institution, and faculty the tests use.
username_test = "test111"
password_test = "Aa123456!!"
institution_test = "SCE_Test"
faculty_test = "Chemistry_Test"
instID = 0  # institution row id; populated by the db_prepare_manage_fac fixture
facID = 0  # faculty row id; populated by the db_prepare_manage_fac fixture
# Name used when the tests create/rename a faculty.
newFacTest = "Software_Test"
@pytest.fixture
def db_prepare_manage_fac():
    """Set up, then tear down, the database rows the faculty tests need.

    Setup: make sure the test institution, the test faculty, their FacIn
    link, and the test user all exist, remembering ``instID``/``facID`` in
    the module globals. Yields the database file name to the test.
    Teardown: delete the user, link, faculty, institution, and any extra
    faculty a test created under ``newFacTest``, then close the connection.

    Fix over the original: ``== None`` / ``!= None`` equality comparisons
    replaced with ``is None`` / ``is not None`` (PEP 8 E711); misleading
    teardown comments ("create it" where rows are deleted) corrected.
    """
    global instID, facID
    # Prepare the institution
    db_name = "database.db"
    # connect to db to prepare it before testing
    con = sqlite3.connect(db_name)
    cursor = con.cursor()
    # Check if institution exists
    sqlQueryCheckExist = "SELECT * FROM Institutions WHERE InstitutionName = (?)"
    sqlRes = con.execute(sqlQueryCheckExist, (institution_test,))
    record = sqlRes.fetchone()
    # If the institution does not exist, create it and remember its row id
    if record is None:
        sqtInsertInst = "INSERT INTO Institutions (InstitutionName) VALUES (?)"
        cursor.execute(sqtInsertInst, (institution_test,))
        instID = cursor.lastrowid
    else:
        instID = record[0]
    # Check if faculty exists
    sqlQueryCheckExist = "SELECT * FROM Faculties WHERE FacultyName = (?)"
    sqlRes = con.execute(sqlQueryCheckExist, (faculty_test,))
    record = sqlRes.fetchone()
    # If the faculty does not exist, create it and remember its row id
    if record is None:
        sqlInsertFac = "INSERT INTO Faculties (FacultyName) VALUES (?)"
        cursor.execute(sqlInsertFac, (faculty_test,))
        facID = cursor.lastrowid
    else:
        facID = record[0]
    # Check if the institution/faculty pair exists in the FacIn link table
    sqlQueryCheckExist = "SELECT * FROM FacIn WHERE InstitutionID = (?) AND FacultyID = (?)"
    sqlRes = con.execute(sqlQueryCheckExist, (instID, facID))
    record = sqlRes.fetchone()
    # If the link does not exist, create it
    if record is None:
        sqtInsertInstFac = "INSERT INTO FacIn VALUES (?, ?)"
        con.execute(sqtInsertInstFac, (instID, facID))
    # Check if the test user exists in the Users table
    sqlQueryCheckExist = "SELECT * FROM Users WHERE UserName = (?)"
    sqlRes = con.execute(sqlQueryCheckExist, (username_test,))
    record = sqlRes.fetchone()
    # If the user does not exist, create it with an encrypted password
    if record is None:
        sqtInsertUser = "INSERT INTO Users VALUES (?,?, ?, ?, ?, ?, ?, 1, 0, ?)"
        con.execute(sqtInsertUser, (username_test, "test1", "test1", encryptPassword(password_test), instID, facID, 2, ""))
    # Commit the setup changes
    con.commit()
    # ----------------------------------------------------------------
    yield db_name
    # Teardown: check if the user exists
    sqlQueryCheckExist = "SELECT * FROM Users WHERE UserName = (?)"
    sqlRes = con.execute(sqlQueryCheckExist, (username_test,))
    record = sqlRes.fetchone()
    # If the user exists, delete it
    if record is not None:
        sqlDelete = "DELETE FROM Users WHERE UserName = (?)"
        sqlRes = con.execute(sqlDelete, (username_test,))
    # Check if the institution/faculty link exists in the FacIn table
    sqlQueryCheckExist = "SELECT * FROM FacIn WHERE InstitutionID = (?) AND FacultyID = (?)"
    sqlRes = con.execute(sqlQueryCheckExist, (instID, facID))
    record = sqlRes.fetchone()
    # If the link exists, delete it
    if record is not None:
        sqtDelInstFac = "DELETE FROM FacIn WHERE InstitutionID = (?) AND FacultyID = (?)"
        con.execute(sqtDelInstFac, (instID, facID))
    # Check if the faculty exists
    sqlQueryCheckExist = "SELECT * FROM Faculties WHERE FacultyName = (?)"
    sqlRes = con.execute(sqlQueryCheckExist, (faculty_test,))
    record = sqlRes.fetchone()
    # If the faculty exists, delete it
    if record is not None:
        sqlDelFac = "DELETE FROM Faculties WHERE FacultyID = (?)"
        con.execute(sqlDelFac, (facID,))
    # Check if the institution exists
    sqlQueryCheckExist = "SELECT * FROM Institutions WHERE InstitutionName = (?)"
    sqlRes = con.execute(sqlQueryCheckExist, (institution_test,))
    record = sqlRes.fetchone()
    # If the institution exists, delete it
    if record is not None:
        sqtDelInst = "DELETE FROM Institutions WHERE InstitutionID = (?)"
        con.execute(sqtDelInst, (instID,))
    # Check if the additional faculty (created by some tests) exists
    sqlQueryCheckExist = "SELECT * FROM Faculties WHERE FacultyName = (?)"
    sqlRes = con.execute(sqlQueryCheckExist, (newFacTest,))
    record = sqlRes.fetchone()
    # If the additional faculty exists, delete it
    if record is not None:
        sqtDelInst = "DELETE FROM Faculties WHERE FacultyID = (?)"
        con.execute(sqtDelInst, (record[0],))
    # Commit the teardown changes
    con.commit()
    # Close connection to DB
    con.close()
class TestManageFaculties:
def test_manage_faculties_page(self, application: str, ff_browser: webdriver.Firefox, db_prepare_manage_fac):
# Run logout to clean session
ff_browser.get(application + "/logout")
# Open the login page
ff_browser.get(application + "/login")
# Get username and password elements on page
username = ff_browser.find_element_by_name("username")
password = ff_browser.find_element_by_name("password")
# Get submit button element
btnSubmit = ff_browser.find_element_by_xpath("/html/body/div[2]/form/input")
# Inject username and password of test user
username.send_keys(username_test)
password.send_keys(password_test)
# Click on submit button
btnSubmit.click()
# Open the control panel page
ff_browser.get(application + "/controlpanel")
# Get manage faculties button
btnManageFaculties = ff_browser.find_element_by_xpath("/html/body/div[2]/div[1]/a[2]/button")
# Click the manage faculties buttons
btnManageFaculties.click()
# Get manage faculties title
manageFacTitle = ff_browser.find_element_by_xpath("/html/body/div[2]/h1")
assert (manageFacTitle.text == "Manage Faculties:")
def test_create_faculty_page(self, application: str, ff_browser: webdriver.Firefox, db_prepare_manage_fac):
# Run logout to clean session
ff_browser.get(application + "/logout")
# Open the login page
ff_browser.get(application + "/login")
# Get username and password elements on page
username = ff_browser.find_element_by_name("username")
password = ff_browser.find_element_by_name("password")
# Get submit button element
btnSubmit = ff_browser.find_element_by_xpath("/html/body/div[2]/form/input")
# Inject username and password of test user
username.send_keys(username_test)
password.send_keys(password_test)
# Click on submit button
btnSubmit.click()
# Open the control panel page
ff_browser.get(application + "/manage_faculties")
# Get manage faculty button
btnCreateFac = ff_browser.find_element_by_xpath("/html/body/div[2]/div[2]/a/button")
# Click the manage institution buttons
btnCreateFac.click()
# Get manage institution title
createFacTitle = ff_browser.find_element_by_xpath("/html/body/div[2]/h1")
assert (createFacTitle.text == "Add new Faculty:")
def test_create_new_faculty(self, application: str, ff_browser: webdriver.Firefox, db_prepare_manage_fac):
# Run logout to clean session
ff_browser.get(application + "/logout")
# Open the login page
ff_browser.get(application + "/login")
# Get username and password elements on page
username = ff_browser.find_element_by_name("username")
password = ff_browser.find_element_by_name("password")
# Get submit button element
btnSubmit = ff_browser.find_element_by_xpath("/html/body/div[2]/form/input")
# Inject username and password of test user
username.send_keys(username_test)
password.send_keys(password_test)
# Click on submit button
btnSubmit.click()
# Open the create faculty page
ff_browser.get(application + "/create_faculty")
# Get faculty name input
facName = ff_browser.find_element_by_xpath("/html/body/div[2]/form/p/input")
# Send value for new faculty
facName.send_keys(newFacTest)
# Get save faculty button
btnSaveFac = ff_browser.find_element_by_xpath("/html/body/div[2]/form/input")
# Click on submit button
btnSaveFac.click()
# Get title element
titleSaved = ff_browser.find_element_by_xpath("/html/body/div[2]/h1")
assert (titleSaved.text == "Manage Faculties:")
def test_create_new_faculty_wrong_name(self, application: str, ff_browser: webdriver.Firefox, db_prepare_manage_fac):
# Run logout to clean session
ff_browser.get(application + "/logout")
# Open the login page
ff_browser.get(application + "/login")
# Get username and password elements on page
username = ff_browser.find_element_by_name("username")
password = ff_browser.find_element_by_name("password")
# Get submit button element
btnSubmit = ff_browser.find_element_by_xpath("/html/body/div[2]/form/input")
# Inject username and password of test user
username.send_keys(username_test)
password.send_keys(password_test)
# Click on submit button
btnSubmit.click()
# Open the create faculty page
ff_browser.get(application + "/create_faculty")
# Get faculty name input
facName = ff_browser.find_element_by_xpath("/html/body/div[2]/form/p/input")
# Send value for new faculty
facName.send_keys(faculty_test)
# Get save faculty button
btnSaveFac = ff_browser.find_element_by_xpath("/html/body/div[2]/form/input")
# Click on submit button
btnSaveFac.click()
# Get title element
titleWrongName = ff_browser.find_element_by_xpath("/html/body/div[2]/p[1]")
assert (titleWrongName.text == "Faculty already exists")
def test_edit_faculty_page(self, application: str, ff_browser: webdriver.Firefox, db_prepare_manage_fac):
# Run logout to clean session
ff_browser.get(application + "/logout")
# Open the login page
ff_browser.get(application + "/login")
# Get username and password elements on page
username = ff_browser.find_element_by_name("username")
password = ff_browser.find_element_by_name("password")
# Get submit button element
btnSubmit = ff_browser.find_element_by_xpath("/html/body/div[2]/form/input")
# Inject username and password of test user
username.send_keys(username_test)
password.send_keys(password_test)
# Click on submit button
btnSubmit.click()
# Open the control panel page
ff_browser.get(application + "/manage_faculties")
# Get selection of faculty box
faculty = Select(ff_browser.find_element_by_name("faculty"))
# Select the faculty for test
faculty.select_by_visible_text(faculty_test)
# Get edit faculty button
btnEditFac = ff_browser.find_element_by_xpath("/html/body/div[2]/div[1]/a/button")
# Click the edit faculty buttons
btnEditFac.click()
# Get manage faculty title
editFacTitle = ff_browser.find_element_by_xpath("/html/body/div[2]/h1")
assert (editFacTitle.text == "Edit Faculty:")
def test_edit_faculty(self, application: str, ff_browser: webdriver.Firefox, db_prepare_manage_fac):
    """Renaming a faculty via /edit_faculty/<id> redirects back to the manage page."""
    # Selenium 4.3 removed the find_element_by_* helpers; use find_element(By.*, ...).
    from selenium.webdriver.common.by import By
    # Log out to clear any previous session, then log in as the test admin.
    ff_browser.get(application + "/logout")
    ff_browser.get(application + "/login")
    ff_browser.find_element(By.NAME, "username").send_keys(username_test)
    ff_browser.find_element(By.NAME, "password").send_keys(password_test)
    ff_browser.find_element(By.XPATH, "/html/body/div[2]/form/input").click()
    # Open the edit page of the test faculty and submit a new name.
    ff_browser.get(application + "/edit_faculty/" + str(facID))
    fac_name = ff_browser.find_element(By.XPATH, "/html/body/div[2]/form/p/input")
    fac_name.clear()  # drop the pre-filled current name before typing
    fac_name.send_keys(newFacTest)
    ff_browser.find_element(By.XPATH, "/html/body/div[2]/form/input").click()
    # On success the app shows the manage-faculties page again.
    title_saved = ff_browser.find_element(By.XPATH, "/html/body/div[2]/h1")
    assert title_saved.text == "Manage Faculties:"
def test_edit_faculty_existing_name(self, application: str, ff_browser: webdriver.Firefox, db_prepare_manage_fac):
    """Renaming a faculty to an already existing name shows an error message."""
    # Selenium 4.3 removed the find_element_by_* helpers; use find_element(By.*, ...).
    from selenium.webdriver.common.by import By
    # Log out to clear any previous session, then log in as the test admin.
    ff_browser.get(application + "/logout")
    ff_browser.get(application + "/login")
    ff_browser.find_element(By.NAME, "username").send_keys(username_test)
    ff_browser.find_element(By.NAME, "password").send_keys(password_test)
    ff_browser.find_element(By.XPATH, "/html/body/div[2]/form/input").click()
    # Open the edit page and submit a name that is already taken.
    ff_browser.get(application + "/edit_faculty/" + str(facID))
    fac_name = ff_browser.find_element(By.XPATH, "/html/body/div[2]/form/p/input")
    fac_name.clear()  # drop the pre-filled current name
    fac_name.send_keys(faculty_test)  # this faculty already exists
    ff_browser.find_element(By.XPATH, "/html/body/div[2]/form/input").click()
    error = ff_browser.find_element(By.XPATH, "/html/body/div[2]/p[1]")
    assert error.text == "Faculty already exists"
def test_not_admin_manage_faculty_page(self, application: str, ff_browser: webdriver.Firefox, db_prepare_manage_fac):
    """An unauthenticated visitor on /manage_faculties is shown the greeting page."""
    # Selenium 4.3 removed the find_element_by_* helpers; use find_element(By.*, ...).
    from selenium.webdriver.common.by import By
    # Make sure no session is active, then request the admin-only page.
    ff_browser.get(application + "/logout")
    ff_browser.get(application + "/manage_faculties")
    main_title = ff_browser.find_element(By.XPATH, "/html/body/div[2]/h1")
    assert main_title.text == "Hello"
def test_not_admin_create_faculty_page(self, application: str, ff_browser: webdriver.Firefox, db_prepare_manage_fac):
    """An unauthenticated visitor on /create_faculty is shown the greeting page."""
    # Selenium 4.3 removed the find_element_by_* helpers; use find_element(By.*, ...).
    from selenium.webdriver.common.by import By
    # Make sure no session is active, then request the admin-only page.
    ff_browser.get(application + "/logout")
    ff_browser.get(application + "/create_faculty")
    main_title = ff_browser.find_element(By.XPATH, "/html/body/div[2]/h1")
    assert main_title.text == "Hello"
def test_not_admin_edit_faculty_page(self, application: str, ff_browser: webdriver.Firefox, db_prepare_manage_fac):
    """An unauthenticated visitor on /edit_faculty/<id> is shown the greeting page."""
    # Selenium 4.3 removed the find_element_by_* helpers; use find_element(By.*, ...).
    from selenium.webdriver.common.by import By
    # Make sure no session is active, then request the admin-only page.
    ff_browser.get(application + "/logout")
    ff_browser.get(application + "/edit_faculty/" + str(facID))
    main_title = ff_browser.find_element(By.XPATH, "/html/body/div[2]/h1")
    assert main_title.text == "Hello"
| 35.657143
| 123
| 0.663955
| 1,934
| 16,224
| 5.389866
| 0.08635
| 0.069071
| 0.053626
| 0.082502
| 0.809766
| 0.805545
| 0.788469
| 0.777533
| 0.762855
| 0.747026
| 0
| 0.005107
| 0.239645
| 16,224
| 455
| 124
| 35.657143
| 0.839899
| 0.216963
| 0
| 0.665072
| 0
| 0
| 0.175433
| 0.045606
| 0
| 0
| 0
| 0
| 0.047847
| 1
| 0.052632
| false
| 0.08134
| 0.028708
| 0
| 0.086124
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
0ad17b7bcbc1a6ba9d64dbbf0e5406ce4ae767cc
| 7,220
|
py
|
Python
|
webapi/tests/test_post.py
|
xqhgit/fastapi-vue-blog
|
4f90869c4b8078205239be38d79ac9be6dcb56b8
|
[
"MIT"
] | 3
|
2022-03-10T08:43:24.000Z
|
2022-03-26T09:10:29.000Z
|
webapi/tests/test_post.py
|
xqhgit/fastapi-vue-blog
|
4f90869c4b8078205239be38d79ac9be6dcb56b8
|
[
"MIT"
] | null | null | null |
webapi/tests/test_post.py
|
xqhgit/fastapi-vue-blog
|
4f90869c4b8078205239be38d79ac9be6dcb56b8
|
[
"MIT"
] | 2
|
2022-03-11T03:14:38.000Z
|
2022-03-19T07:27:56.000Z
|
import json
import pytest
from starlette.testclient import TestClient
from webapi.db.dals.post_dal import Post, PostDAL
def test_get_posts(test_app: TestClient, monkeypatch):
    """GET /api/posts/ returns the paginated list built from the DAL rows."""
    rows = [
        {
            "id": 1,
            "title": "a",
            "description": "a",
            "timestamp": "2021-09-20T17:03:06",
            "can_comment": True,
            "is_published": True,
        },
        {
            "id": 2,
            "title": "b",
            "description": "b",
            "timestamp": "2021-09-25T15:02:07",
            "can_comment": True,
            "is_published": True,
        },
    ]
    # The API decorates every row with empty categories and a zero comment count.
    expected = {
        "total": 2,
        "items": [dict(row, categories=[], comments=0) for row in rows],
    }

    async def fake_count(self, title=None):
        return 2

    async def fake_get_limit(self, title=None, *, page, limit):
        return [Post(**row) for row in rows]

    monkeypatch.setattr(PostDAL, "count", fake_count)
    monkeypatch.setattr(PostDAL, "get_limit", fake_get_limit)
    response = test_app.get(url='/api/posts/')
    assert response.status_code == 200
    assert response.json() == expected
def test_get_post(test_app: TestClient, monkeypatch):
    """GET /api/posts/1/ returns the single post produced by the DAL lookup."""
    data = {
        "id": 1,
        "title": "a",
        "description": "aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa",
        "body": "a",
        "body_html": "a",
        "timestamp": "2021-09-20T17:03:06",
        "can_comment": True,
        "is_published": True,
        "categories": [
            {"id": 8, "name": "Go"},
            {"id": 13, "name": "Python0"},
        ],
        "comments": [],
    }

    async def fake_get_by_id(self, record_id):
        return data

    monkeypatch.setattr(PostDAL, 'get_by_id', fake_get_by_id)
    response = test_app.get(url='/api/posts/1/')
    assert response.status_code == 200
    assert response.json() == data
def test_create_post(test_app_token: TestClient, monkeypatch):
    """Authorized POST /api/posts/ returns 201 and the created post summary."""
    data = {
        "id": 1,
        "title": "test",
        "description": "aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa",
        "body": "a",
        "body_html": "a",
        "can_comment": True,
        "is_published": True,
    }
    payload = {**data, "categories": []}
    expected = {"id": 1, "title": "test"}

    async def fake_get_by_title(self, title):
        # No post with this title exists yet, so creation may proceed.
        return []

    async def fake_create(self, obj_in):
        return Post(**data)

    monkeypatch.setattr(PostDAL, 'get_by_title', fake_get_by_title)
    monkeypatch.setattr(PostDAL, 'create', fake_create)
    response = test_app_token.post(url='/api/posts/', json=payload)
    assert response.status_code == 201
    assert response.json() == expected
def test_create_post_no_token(test_app: TestClient, monkeypatch):
    """POST /api/posts/ without an auth token is rejected with 401.

    The DAL is still patched defensively: if auth ever failed open, the
    request must not reach a real database.
    """
    data = {
        "id": 1,
        "title": "test",
        "description": "aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa",
        "body": "a",
        "can_comment": True,
        "is_published": True,
    }
    create_data = {**data, "categories": []}
    # (The unused `result` dict from the original was dead code and is removed.)

    async def mock_get_by_title(self, title):
        return []

    async def mock_create(self, obj_in):
        return Post(**data)

    monkeypatch.setattr(PostDAL, 'get_by_title', mock_get_by_title)
    monkeypatch.setattr(PostDAL, 'create', mock_create)
    response = test_app.post(url='/api/posts/', json=create_data)
    assert response.status_code == 401
def test_update_post(test_app_token: TestClient, monkeypatch):
    """Authorized PUT /api/posts/1/ updates the post and returns its summary."""
    data = {
        "id": 1,
        "title": "test",
        "description": "aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa",
        "body": "a",
        "timestamp": "2021-09-20T17:03:06",
        "can_comment": True,
        "is_published": True,
    }
    update_data = {
        "title": "test",
        "description": "aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa",
        "body": "a",
        "can_comment": True,
        "is_published": True,
        "categories": [],
    }
    expected = {"id": 1, "title": "test"}

    async def fake_get_by_id(self, title):
        # Any truthy value signals "the post exists".
        return [1, ]

    async def fake_update(self, db_obj, obj_in):
        return Post(**data)

    monkeypatch.setattr(PostDAL, 'get_by_id', fake_get_by_id)
    monkeypatch.setattr(PostDAL, 'update', fake_update)
    response = test_app_token.put(url='/api/posts/1/', json=update_data)
    assert response.status_code == 200
    assert response.json() == expected
def test_update_post_no_toke(test_app: TestClient, monkeypatch):
    """PUT /api/posts/1/ without an auth token is rejected with 401.

    NOTE(review): the function name has a typo ("no_toke" -> "no_token");
    kept as-is so the test id stays stable.

    The DAL is still patched defensively: if auth ever failed open, the
    request must not reach a real database.
    """
    data = {
        "id": 1,
        "title": "test",
        "description": "aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa",
        "body": "a",
        "timestamp": "2021-09-20T17:03:06",
        "can_comment": True,
        "is_published": True,
    }
    update_data = {
        "title": "test",
        "description": "aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa aaaaaaaaaaaaaaaa",
        "body": "a",
        "can_comment": True,
        "is_published": True,
        "categories": [],
    }
    # (The unused `result` dict from the original was dead code and is removed.)

    async def mock_get_by_id(self, title):
        return [1, ]

    async def mock_update(self, db_obj, obj_in):
        return Post(**data)

    monkeypatch.setattr(PostDAL, 'get_by_id', mock_get_by_id)
    monkeypatch.setattr(PostDAL, 'update', mock_update)
    response = test_app.put(url='/api/posts/1/', json=update_data)
    assert response.status_code == 401
def test_delete_post(test_app_token: TestClient, monkeypatch):
    """Authorized DELETE /api/posts/1/ succeeds with 200."""

    async def fake_get_by_id(self, title):
        # Any truthy value signals "the post exists".
        return [1, ]

    async def fake_delete(self, db_obj):
        return None

    monkeypatch.setattr(PostDAL, 'get_by_id', fake_get_by_id)
    monkeypatch.setattr(PostDAL, 'delete', fake_delete)
    assert test_app_token.delete(url='/api/posts/1/').status_code == 200
def test_delete_post_no_token(test_app: TestClient, monkeypatch):
    """DELETE /api/posts/1/ without an auth token is rejected with 401."""

    async def fake_get_by_id(self, title):
        return [1, ]

    async def fake_delete(self, db_obj):
        return None

    # Patched defensively; the request should be rejected before the DAL runs.
    monkeypatch.setattr(PostDAL, 'get_by_id', fake_get_by_id)
    monkeypatch.setattr(PostDAL, 'delete', fake_delete)
    assert test_app.delete(url='/api/posts/1/').status_code == 401
| 27.245283
| 93
| 0.556094
| 769
| 7,220
| 5.0013
| 0.113134
| 0.174727
| 0.046802
| 0.045762
| 0.887416
| 0.878055
| 0.862455
| 0.836713
| 0.815653
| 0.782631
| 0
| 0.030608
| 0.312188
| 7,220
| 264
| 94
| 27.348485
| 0.743858
| 0
| 0
| 0.701835
| 0
| 0
| 0.219668
| 0
| 0
| 0
| 0
| 0
| 0.055046
| 1
| 0.036697
| false
| 0.009174
| 0.018349
| 0
| 0.114679
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c60532d800d8584b5a0f2a15210031ce4d6204b
| 11,080
|
py
|
Python
|
tf_routes/visualizations.py
|
jalhackl/tf_routes
|
25ce3bca61c52a537b2d0dd08696e9a8f54ab29b
|
[
"MIT"
] | 1
|
2022-02-21T09:23:50.000Z
|
2022-02-21T09:23:50.000Z
|
tf_routes/visualizations.py
|
jalhackl/tf_routes
|
25ce3bca61c52a537b2d0dd08696e9a8f54ab29b
|
[
"MIT"
] | 1
|
2022-02-21T21:41:43.000Z
|
2022-02-21T21:41:43.000Z
|
tf_routes/visualizations.py
|
jalhackl/tf_routes
|
25ce3bca61c52a537b2d0dd08696e9a8f54ab29b
|
[
"MIT"
] | null | null | null |
from rdkit import Chem
from rdkit.Chem import Draw
from rdkit.Chem.Draw import IPythonConsole
IPythonConsole.ipython_useSVG = False
import logging
from colorsys import hls_to_rgb
from copy import deepcopy
from rdkit.Chem import AllChem, Draw
from rdkit.Chem.Draw import IPythonConsole, rdMolDraw2D
logger = logging.getLogger(__name__)
def _show_common_core_gradient(
    mol, highlight, mutationl, percomponent=False, numbers=False
):
    """
    Draw the molecule as SVG, highlighting the common core and colouring the
    mutated atoms with a rainbow gradient that encodes the mutation order.

    mol: RDKit molecule; a deep copy is drawn, the input is not modified
    highlight: atom indices of the common core (drawn in the fixed core colour)
    mutationl: list of lists of atom indices, one sub-list per component,
        giving the mutation order
    percomponent: False -> one colour gradient spanning all dummy atoms;
        True -> a separate gradient per component (since the components are
        processed successively, True may illustrate the order better)
    numbers: if True, annotate each mutated atom with its mutation step number
    """
    # https://rdkit.blogspot.com/2015/02/new-drawing-code.html
    mol = deepcopy(mol)
    AllChem.Compute2DCoords(mol)
    drawer = rdMolDraw2D.MolDraw2DSVG(800, 800)
    drawer.drawOptions().setHighlightColour((0.8, 0.8, 0.8))
    drawer.SetFontSize(0.3)
    opts = drawer.drawOptions()
    # Label every atom as "<index>:<type>" using its stored properties.
    for atom in mol.GetAtoms():
        opts.atomLabels[atom.GetIdx()] = (
            str(atom.GetProp("atom_index")) + ":" + atom.GetProp("atom_type")
        )
    # Common-core atoms get a fixed dark-green colour.
    atom_cols = {at: (0, 0.15, 0) for at in highlight}
    mutationlflat = [item for sublist in mutationl for item in sublist]
    if not percomponent:
        # One gradient over all mutated atoms.
        rcolours = rainbow_color_gradient(n=len(mutationlflat))
        for i, at in enumerate(mutationlflat):
            atom_cols[at] = rcolours[i]
    else:
        # One gradient per component, restarting the colour scale each time.
        for mutcomp in mutationl:
            rcolours = rainbow_color_gradient(n=len(mutcomp))
            for i, at in enumerate(mutcomp):
                atom_cols[at] = rcolours[i]
    if numbers:
        atomlist = mol.GetAtoms()
        for i, at in enumerate(mutationlflat):
            # 1-based mutation step as an atom note.
            atomlist[at].SetProp("atomNote", "%.0f" % (i + 1))
    highlighted = mutationlflat + list(highlight)
    drawer.DrawMolecule(
        mol, highlightAtoms=highlighted, highlightAtomColors=atom_cols
    )
    Draw.DrawingOptions.includeAtomNumbers = False
    drawer.FinishDrawing()
    # The original also rendered a PIL image via Draw.MolToImage and discarded
    # it; that dead work is removed here. The duplicated drawing tail in both
    # branches is now shared.
    svg = drawer.GetDrawingText().replace("svg:", "")
    return svg
def _show_common_core_gradient_write(mol, highlight, mutationl, percomponent=False):
    """
    Like _show_common_core_gradient, but sized for writing to files (300x300)
    and returning both the SVG text and a PIL image.

    mol: RDKit molecule; a deep copy is drawn, the input is not modified
    highlight: atom indices of the common core (drawn in the fixed core colour)
    mutationl: list of lists of atom indices, one sub-list per component,
        giving the mutation order
    percomponent: False -> one colour gradient spanning all dummy atoms;
        True -> a separate gradient per component

    Returns (svg_text, pil_image).
    """
    # https://rdkit.blogspot.com/2015/02/new-drawing-code.html
    mol = deepcopy(mol)
    AllChem.Compute2DCoords(mol)
    drawer = rdMolDraw2D.MolDraw2DSVG(300, 300)
    drawer.drawOptions().setHighlightColour((0.8, 0.8, 0.8))
    drawer.SetFontSize(0.3)
    opts = drawer.drawOptions()
    # Label every atom as "<index>:<type>" using its stored properties.
    for atom in mol.GetAtoms():
        opts.atomLabels[atom.GetIdx()] = (
            str(atom.GetProp("atom_index")) + ":" + atom.GetProp("atom_type")
        )
    # Common-core atoms get a fixed dark-green colour.
    atom_cols = {at: (0, 0.15, 0) for at in highlight}
    mutationlflat = [item for sublist in mutationl for item in sublist]
    if not percomponent:
        # One gradient over all mutated atoms.
        rcolours = rainbow_color_gradient(n=len(mutationlflat))
        for i, at in enumerate(mutationlflat):
            atom_cols[at] = rcolours[i]
    else:
        # One gradient per component, restarting the colour scale each time.
        for mutcomp in mutationl:
            rcolours = rainbow_color_gradient(n=len(mutcomp))
            for i, at in enumerate(mutcomp):
                atom_cols[at] = rcolours[i]
    highlighted = mutationlflat + list(highlight)
    drawer.DrawMolecule(
        mol, highlightAtoms=highlighted, highlightAtomColors=atom_cols
    )
    Draw.DrawingOptions.includeAtomNumbers = False
    drawer.FinishDrawing()
    svg = drawer.GetDrawingText().replace("svg:", "")
    img = Draw.MolToImage(
        mol,
        highlightAtoms=highlighted,
        highlightAtomColors=atom_cols,
        useSVG=False,
    )
    return svg, img
def animated_visualization_3d_v1(mol, mutationl, ccoremol, hits):
    """
    Animated 3D visualization using py3Dmol: the molecule is shown with the
    mutated atoms removed one step at a time (one frame per mutation step,
    one second apart).

    mol: RDKit molecule to animate
    mutationl: list of lists of atom indices giving the mutation order
    ccoremol: common-core molecule used as the embedding template
    hits: substructure-match indices (stored but not used below)
    """
    import time

    import py3Dmol

    ccoremol  # no-op expression statement (kept from original; has no effect)
    ccoretemplate = ccoremol
    AllChem.EmbedMolecule(ccoretemplate)
    m_match = hits  # NOTE(review): assigned but never used in this function
    molsreduced = []
    # Flatten the per-component mutation order into one sequence.
    flatorder = [item for items in mutationl for item in items]
    for i in range(len(flatorder)):
        # Re-embed the full molecule constrained to the core, then delete the
        # first i+1 mutated atoms (highest index first, so indices stay valid).
        mol1newembed = AllChem.ConstrainedEmbed(mol, ccoretemplate)
        mol1ed = Chem.EditableMol(mol1newembed)
        currentorderlist = flatorder[0 : i + 1]
        currentorderlist.sort(reverse=True)
        for j in currentorderlist:
            mol1ed.RemoveAtom(j)
        molback = mol1ed.GetMol()
        molsreduced.append(molback)
    ccoreblock = Chem.MolToMolBlock(ccoretemplate, kekulize=False)
    # Convert each reduced molecule to a mol block for py3Dmol.
    molblocksreduced = []
    for i in molsreduced:
        mblock = Chem.MolToMolBlock(i, kekulize=False)
        molblocksreduced.append(mblock)
    view = py3Dmol.view()
    counter = 0
    for i in molblocksreduced:
        # Replace the model each frame; zoom only on the first frame so the
        # camera stays fixed during the animation.
        view.removeAllModels()
        view.addModel(i, "mol")
        view.setStyle({"model": 0}, {"stick": {}})
        view.setBackgroundColor("0xeeeeee")
        if counter == 0:
            view.zoomTo()
        counter = counter + 1
        view.update()
        time.sleep(1)
def animated_visualization_3d_v2(mol, mutationl, ccoremol, hits):
    """
    Animated 3D visualization using py3Dmol; like v1, but the common-core
    atoms are additionally rendered as spheres in every frame, and the
    molecule's conformer positions are first snapped onto the core conformer.

    mol: RDKit molecule to animate (its conformer IS modified in place)
    mutationl: list of lists of atom indices giving the mutation order
    ccoremol: common-core molecule used as template and sphere overlay
    hits: substructure-match indices (stored but not used below)
    """
    import time

    import py3Dmol
    from rdkit.Chem import TemplateAlign  # NOTE(review): imported but unused

    m_match = hits  # NOTE(review): assigned but never used in this function
    ccoremol  # no-op expression statement (kept from original; has no effect)
    ccoretemplate = ccoremol
    AllChem.EmbedMolecule(ccoretemplate)
    mol1newembed = AllChem.ConstrainedEmbed(mol, ccoretemplate)
    molsreduced = []
    # Flatten the per-component mutation order into one sequence.
    flatorder = [item for items in mutationl for item in items]
    newhits = mol.GetSubstructMatch(ccoremol)
    mol.GetNumConformers()  # no-op call (result discarded; kept from original)
    counter = 0
    # Copy each core-atom position onto the matching atom of mol.
    # Assumes newhits is ordered so that the counter-th match corresponds to
    # core atom `counter` -- TODO confirm.
    for i in range(mol.GetNumAtoms()):
        for j in newhits:
            if j == i:
                newpos = ccoremol.GetConformer().GetAtomPosition(counter)
                mol.GetConformer().SetAtomPosition(i, newpos)
                counter = counter + 1
    for i in range(len(flatorder)):
        # Re-embed constrained to the core, then delete the first i+1 mutated
        # atoms (highest index first, so indices stay valid).
        mol1newembed = mol
        mol1newembed = AllChem.ConstrainedEmbed(mol1newembed, ccoretemplate)
        mol1newembed.GetNumConformers()  # no-op call (kept from original)
        mol1ed = Chem.EditableMol(mol1newembed)
        currentorderlist = flatorder[0 : i + 1]
        currentorderlist.sort(reverse=True)
        for j in currentorderlist:
            mol1ed.RemoveAtom(j)
        molback = mol1ed.GetMol()
        molsreduced.append(molback)
    ccoreblock = Chem.MolToMolBlock(ccoretemplate, kekulize=False)
    # Convert each reduced molecule to a mol block for py3Dmol.
    molblocksreduced = []
    for i in molsreduced:
        mblock = Chem.MolToMolBlock(i, kekulize=False)
        molblocksreduced.append(mblock)
    view = py3Dmol.view()
    counter = 0
    for i in molblocksreduced:
        # Model 0: the shrinking molecule (sticks); model 1: the core (spheres).
        view.removeAllModels()
        view.addModel(i, "mol")
        view.setStyle({"model": 0}, {"stick": {}})
        view.addModel(ccoreblock, "mol")
        # view.setStyle({'model':1},{'sphere':{'color':'black', 'radius':1.8}})
        view.setStyle({"model": 1}, {"sphere": {"radius": 0.8}})
        view.setBackgroundColor("0xeeeeee")
        if counter == 0:
            view.zoomTo()
        counter = counter + 1
        view.update()
        time.sleep(1)
def convertgraphics(img1, img2):
    """Convert two SVG strings into PIL images suitable for writing to files."""
    from io import BytesIO

    import PIL.Image
    from cairosvg import svg2png

    # Rasterize each SVG to PNG bytes, then load the bytes as a PIL image.
    first = PIL.Image.open(BytesIO(svg2png(img1)))
    second = PIL.Image.open(BytesIO(svg2png(img2)))
    return first, second
def rainbow_color_gradient(n=10, end=1 / 3):
    """Return *n* RGB tuples sweeping the hue from 0 up to *end* (HLS space,
    lightness 0.5, full saturation). For n <= 1 the hue stays at 0; n == 0
    yields an empty list."""
    # For n > 1 the last colour lands exactly on hue `end`; the degenerate
    # branch divides by n instead, which is never evaluated when n == 0
    # because range(0) is empty.
    denominator = n - 1 if n > 1 else n
    return [hls_to_rgb(end * i / denominator, 0.5, 1) for i in range(n)]
| 27.493797
| 125
| 0.621209
| 1,261
| 11,080
| 5.408406
| 0.185567
| 0.013196
| 0.015836
| 0.010557
| 0.830059
| 0.784457
| 0.766276
| 0.738416
| 0.717302
| 0.717302
| 0
| 0.031849
| 0.274549
| 11,080
| 402
| 126
| 27.562189
| 0.816621
| 0.141155
| 0
| 0.763713
| 0
| 0
| 0.015137
| 0
| 0
| 0
| 0.001706
| 0
| 0
| 1
| 0.025316
| false
| 0
| 0.067511
| 0
| 0.113924
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c8d13b1c87077b4c4f2ff141fbf7b1ee74251d6
| 53,005
|
py
|
Python
|
ngraph/python/tests/test_ngraph/test_create_op.py
|
AnastasiaKazantaeva/openvino
|
ec48fcb29bef94cca480573110a598afc3515019
|
[
"Apache-2.0"
] | null | null | null |
ngraph/python/tests/test_ngraph/test_create_op.py
|
AnastasiaKazantaeva/openvino
|
ec48fcb29bef94cca480573110a598afc3515019
|
[
"Apache-2.0"
] | 6
|
2022-02-11T15:01:22.000Z
|
2022-02-21T13:24:09.000Z
|
ngraph/python/tests/test_ngraph/test_create_op.py
|
AnastasiaKazantaeva/openvino
|
ec48fcb29bef94cca480573110a598afc3515019
|
[
"Apache-2.0"
] | null | null | null |
# ******************************************************************************
# Copyright 2017-2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
import numpy as np
import pytest
from _pyngraph import PartialShape
import ngraph as ng
import ngraph.opset1 as ng_opset1
from ngraph.impl import Type
from tests import skip_segfault
# One float and one int dtype -- the default parametrization for most shape tests.
np_types = [np.float32, np.int32]
# Every integral numpy dtype, signed and unsigned, 8 through 64 bit.
integral_np_types = [
    np.int8,
    np.int16,
    np.int32,
    np.int64,
    np.uint8,
    np.uint16,
    np.uint32,
    np.uint64,
]
@pytest.mark.parametrize("dtype", np_types)
def test_binary_convolution(dtype):
    """BinaryConvolution of a 9x9 input with a 3x3 kernel yields a 7x7 map."""
    data = ng.parameter([1, 1, 9, 9], name="Input0", dtype=dtype)
    kernel = ng.parameter([1, 1, 3, 3], name="Input1", dtype=dtype)
    node = ng.binary_convolution(
        data,
        kernel,
        np.array([1, 1]),  # strides
        np.array([0, 0]),  # pads_begin
        np.array([0, 0]),  # pads_end
        np.array([1, 1]),  # dilations
        "xnor-popcount",
        0.0,  # pad_value
    )
    assert node.get_type_name() == "BinaryConvolution"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [1, 1, 7, 7]
@pytest.mark.parametrize("dtype", np_types)
def test_ctc_greedy_decoder(dtype):
    """CTCGreedyDecoder over [T, N, C] logits produces an [N, T, 1, 1] output."""
    logits = ng.parameter([20, 8, 128], name="Input0", dtype=dtype)
    masks = ng.parameter([20, 8], name="Input1", dtype=dtype)
    node = ng.ctc_greedy_decoder(logits, masks)
    assert node.get_type_name() == "CTCGreedyDecoder"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [8, 20, 1, 1]
@pytest.mark.parametrize("dtype", np_types)
def test_deformable_convolution(dtype):
    """DeformableConvolution of a 9x9 input with a 3x3 kernel yields a 7x7 map."""
    data = ng.parameter([1, 1, 9, 9], name="Input0", dtype=dtype)
    offsets = ng.parameter([1, 1, 9, 9], name="Input1", dtype=dtype)
    kernel = ng.parameter([1, 1, 3, 3], name="Input2", dtype=dtype)
    node = ng.deformable_convolution(
        data,
        offsets,
        kernel,
        np.array([1, 1]),  # strides
        np.array([0, 0]),  # pads_begin
        np.array([0, 0]),  # pads_end
        np.array([1, 1]),  # dilations
    )
    assert node.get_type_name() == "DeformableConvolution"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [1, 1, 7, 7]
@pytest.mark.parametrize("dtype", np_types)
def test_deformable_psroi_pooling(dtype):
    """DeformablePSROIPooling over 300 ROIs yields
    [num_rois, output_dim, group_size, group_size]."""
    output_dim = 8
    spatial_scale = 0.0625
    group_size = 7
    mode = "bilinear_deformable"
    spatial_bins_x = 4
    spatial_bins_y = 4
    trans_std = 0.1
    part_size = 7
    input0_shape = [1, 392, 38, 63]  # feature maps
    input1_shape = [300, 5]  # ROI boxes; presumably [batch_idx, x1, y1, x2, y2] -- TODO confirm
    input2_shape = [300, 2, 7, 7]  # per-ROI transformation offsets
    expected_shape = [300, 8, 7, 7]
    parameter_input0 = ng.parameter(input0_shape, name="Input0", dtype=dtype)
    parameter_input1 = ng.parameter(input1_shape, name="Input1", dtype=dtype)
    parameter_input2 = ng.parameter(input2_shape, name="Input2", dtype=dtype)
    node = ng.deformable_psroi_pooling(
        parameter_input0,
        parameter_input1,
        output_dim,
        spatial_scale,
        group_size,
        mode,
        spatial_bins_x,
        spatial_bins_y,
        trans_std,
        part_size,
        offsets=parameter_input2,
    )
    assert node.get_type_name() == "DeformablePSROIPooling"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == expected_shape
@pytest.mark.parametrize("dtype", np_types)
def test_floor_mod(dtype):
    """FloorMod broadcasts its two inputs and yields the broadcast shape."""
    lhs = ng.parameter([8, 1, 6, 1], name="Input0", dtype=dtype)
    rhs = ng.parameter([7, 1, 5], name="Input1", dtype=dtype)
    node = ng.floor_mod(lhs, rhs)
    assert node.get_type_name() == "FloorMod"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [8, 7, 6, 5]
@pytest.mark.parametrize("dtype", np_types)
def test_gather_tree(dtype):
    """GatherTree preserves the shape of its first input."""
    in0 = ng.parameter([100, 1, 10], name="Input0", dtype=dtype)
    in1 = ng.parameter([100, 1, 10], name="Input1", dtype=dtype)
    in2 = ng.parameter([1], name="Input2", dtype=dtype)
    in3 = ng.parameter([], name="Input3", dtype=dtype)  # scalar input
    node = ng.gather_tree(in0, in1, in2, in3)
    assert node.get_type_name() == "GatherTree"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [100, 1, 10]
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_lstm_cell_operator(dtype):
    """LSTMCell built with defaults and with explicit activations/clip yields
    two outputs of shape [batch_size, hidden_size]."""
    batch_size = 1
    input_size = 16
    hidden_size = 128
    X_shape = [batch_size, input_size]
    H_t_shape = [batch_size, hidden_size]
    C_t_shape = [batch_size, hidden_size]
    # Weights are stacked 4x along axis 0 (presumably one block per LSTM gate).
    W_shape = [4 * hidden_size, input_size]
    R_shape = [4 * hidden_size, hidden_size]
    B_shape = [4 * hidden_size]
    parameter_X = ng.parameter(X_shape, name="X", dtype=dtype)
    parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=dtype)
    parameter_C_t = ng.parameter(C_t_shape, name="C_t", dtype=dtype)
    parameter_W = ng.parameter(W_shape, name="W", dtype=dtype)
    parameter_R = ng.parameter(R_shape, name="R", dtype=dtype)
    parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
    expected_shape = [1, 128]
    # Default construction: only the mandatory inputs plus hidden_size.
    node_default = ng.lstm_cell(
        parameter_X, parameter_H_t, parameter_C_t, parameter_W, parameter_R, parameter_B, hidden_size,
    )
    assert node_default.get_type_name() == "LSTMCell"
    assert node_default.get_output_size() == 2
    assert list(node_default.get_output_shape(0)) == expected_shape
    assert list(node_default.get_output_shape(1)) == expected_shape
    # Parameterized construction: explicit activations, alpha/beta and clip.
    activations = ["tanh", "Sigmoid", "RELU"]
    activation_alpha = [1.0, 2.0, 3.0]
    activation_beta = [3.0, 2.0, 1.0]
    clip = 0.5
    node_param = ng.lstm_cell(
        parameter_X,
        parameter_H_t,
        parameter_C_t,
        parameter_W,
        parameter_R,
        parameter_B,
        hidden_size,
        activations,
        activation_alpha,
        activation_beta,
        clip,
    )
    assert node_param.get_type_name() == "LSTMCell"
    assert node_param.get_output_size() == 2
    assert list(node_param.get_output_shape(0)) == expected_shape
    assert list(node_param.get_output_shape(1)) == expected_shape
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_lstm_cell_operator_opset1(dtype):
    """Same as test_lstm_cell_operator, but through the opset1 factory:
    default and fully-parameterized LSTMCell, two [batch, hidden] outputs."""
    batch_size = 1
    input_size = 16
    hidden_size = 128
    X_shape = [batch_size, input_size]
    H_t_shape = [batch_size, hidden_size]
    C_t_shape = [batch_size, hidden_size]
    # Weights are stacked 4x along axis 0 (presumably one block per LSTM gate).
    W_shape = [4 * hidden_size, input_size]
    R_shape = [4 * hidden_size, hidden_size]
    B_shape = [4 * hidden_size]
    parameter_X = ng.parameter(X_shape, name="X", dtype=dtype)
    parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=dtype)
    parameter_C_t = ng.parameter(C_t_shape, name="C_t", dtype=dtype)
    parameter_W = ng.parameter(W_shape, name="W", dtype=dtype)
    parameter_R = ng.parameter(R_shape, name="R", dtype=dtype)
    parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
    expected_shape = [1, 128]
    # Default construction via the opset1 factory.
    node_default = ng_opset1.lstm_cell(
        parameter_X, parameter_H_t, parameter_C_t, parameter_W, parameter_R, parameter_B, hidden_size,
    )
    assert node_default.get_type_name() == "LSTMCell"
    assert node_default.get_output_size() == 2
    assert list(node_default.get_output_shape(0)) == expected_shape
    assert list(node_default.get_output_shape(1)) == expected_shape
    # Parameterized construction: explicit activations, alpha/beta and clip.
    activations = ["tanh", "Sigmoid", "RELU"]
    activation_alpha = [1.0, 2.0, 3.0]
    activation_beta = [3.0, 2.0, 1.0]
    clip = 0.5
    node_param = ng_opset1.lstm_cell(
        parameter_X,
        parameter_H_t,
        parameter_C_t,
        parameter_W,
        parameter_R,
        parameter_B,
        hidden_size,
        activations,
        activation_alpha,
        activation_beta,
        clip,
    )
    assert node_param.get_type_name() == "LSTMCell"
    assert node_param.get_output_size() == 2
    assert list(node_param.get_output_shape(0)) == expected_shape
    assert list(node_param.get_output_shape(1)) == expected_shape
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_lstm_sequence_operator_bidirectional_opset1(dtype):
    """opset1 LSTMSequence with BIDIRECTIONAL direction: three outputs, built
    both with defaults and with explicit activations/clip."""
    batch_size = 1
    input_size = 16
    hidden_size = 128
    num_directions = 2
    seq_length = 2
    X_shape = [batch_size, seq_length, input_size]
    H_t_shape = [batch_size, num_directions, hidden_size]
    C_t_shape = [batch_size, num_directions, hidden_size]
    seq_len_shape = [batch_size]
    # Weights carry a leading num_directions axis; the 4x stacking is
    # presumably one block per LSTM gate.
    W_shape = [num_directions, 4 * hidden_size, input_size]
    R_shape = [num_directions, 4 * hidden_size, hidden_size]
    B_shape = [num_directions, 4 * hidden_size]
    parameter_X = ng.parameter(X_shape, name="X", dtype=dtype)
    parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=dtype)
    parameter_C_t = ng.parameter(C_t_shape, name="C_t", dtype=dtype)
    # Sequence lengths are integral regardless of the parametrized float dtype.
    parameter_seq_len = ng.parameter(seq_len_shape, name="seq_len", dtype=np.int32)
    parameter_W = ng.parameter(W_shape, name="W", dtype=dtype)
    parameter_R = ng.parameter(R_shape, name="R", dtype=dtype)
    parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
    direction = "BIDIRECTIONAL"
    # Default construction: mandatory inputs, hidden_size and direction only.
    node = ng_opset1.lstm_sequence(
        parameter_X,
        parameter_H_t,
        parameter_C_t,
        parameter_seq_len,
        parameter_W,
        parameter_R,
        parameter_B,
        hidden_size,
        direction,
    )
    assert node.get_type_name() == "LSTMSequence"
    assert node.get_output_size() == 3
    # Parameterized construction: explicit activations, alpha/beta and clip.
    activations = ["RELU", "tanh", "Sigmoid"]
    activation_alpha = [1.0, 2.0, 3.0]
    activation_beta = [3.0, 2.0, 1.0]
    clip = 1.22
    node_param = ng_opset1.lstm_sequence(
        parameter_X,
        parameter_H_t,
        parameter_C_t,
        parameter_seq_len,
        parameter_W,
        parameter_R,
        parameter_B,
        hidden_size,
        direction,
        activations,
        activation_alpha,
        activation_beta,
        clip,
    )
    assert node_param.get_type_name() == "LSTMSequence"
    assert node_param.get_output_size() == 3
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_lstm_sequence_operator_reverse_opset1(dtype):
    """opset1 LSTMSequence with REVERSE direction: three outputs, built both
    with defaults and with explicit activations/clip."""
    batch_size = 2
    input_size = 4
    hidden_size = 3
    num_directions = 1
    seq_length = 2
    X_shape = [batch_size, seq_length, input_size]
    H_t_shape = [batch_size, num_directions, hidden_size]
    C_t_shape = [batch_size, num_directions, hidden_size]
    seq_len_shape = [batch_size]
    # Weights carry a leading num_directions axis; the 4x stacking is
    # presumably one block per LSTM gate.
    W_shape = [num_directions, 4 * hidden_size, input_size]
    R_shape = [num_directions, 4 * hidden_size, hidden_size]
    B_shape = [num_directions, 4 * hidden_size]
    parameter_X = ng.parameter(X_shape, name="X", dtype=dtype)
    parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=dtype)
    parameter_C_t = ng.parameter(C_t_shape, name="C_t", dtype=dtype)
    # Sequence lengths are integral regardless of the parametrized float dtype.
    parameter_seq_len = ng.parameter(seq_len_shape, name="seq_len", dtype=np.int32)
    parameter_W = ng.parameter(W_shape, name="W", dtype=dtype)
    parameter_R = ng.parameter(R_shape, name="R", dtype=dtype)
    parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
    direction = "REVERSE"
    # Default construction: mandatory inputs, hidden_size and direction only.
    node_default = ng_opset1.lstm_sequence(
        parameter_X,
        parameter_H_t,
        parameter_C_t,
        parameter_seq_len,
        parameter_W,
        parameter_R,
        parameter_B,
        hidden_size,
        direction,
    )
    assert node_default.get_type_name() == "LSTMSequence"
    assert node_default.get_output_size() == 3
    # Parameterized construction: explicit activations, alpha/beta and clip.
    activations = ["RELU", "tanh", "Sigmoid"]
    activation_alpha = [1.0, 2.0, 3.0]
    activation_beta = [3.0, 2.0, 1.0]
    clip = 1.22
    node_param = ng_opset1.lstm_sequence(
        parameter_X,
        parameter_H_t,
        parameter_C_t,
        parameter_seq_len,
        parameter_W,
        parameter_R,
        parameter_B,
        hidden_size,
        direction,
        activations,
        activation_alpha,
        activation_beta,
        clip,
    )
    assert node_param.get_type_name() == "LSTMSequence"
    assert node_param.get_output_size() == 3
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_lstm_sequence_operator_forward_opset1(dtype):
batch_size = 2
input_size = 4
hidden_size = 3
num_directions = 1
seq_length = 2
X_shape = [batch_size, seq_length, input_size]
H_t_shape = [batch_size, num_directions, hidden_size]
C_t_shape = [batch_size, num_directions, hidden_size]
seq_len_shape = [batch_size]
W_shape = [num_directions, 4 * hidden_size, input_size]
R_shape = [num_directions, 4 * hidden_size, hidden_size]
B_shape = [num_directions, 4 * hidden_size]
parameter_X = ng.parameter(X_shape, name="X", dtype=dtype)
parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=dtype)
parameter_C_t = ng.parameter(C_t_shape, name="C_t", dtype=dtype)
parameter_seq_len = ng.parameter(seq_len_shape, name="seq_len", dtype=np.int32)
parameter_W = ng.parameter(W_shape, name="W", dtype=dtype)
parameter_R = ng.parameter(R_shape, name="R", dtype=dtype)
parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
direction = "forward"
node_default = ng_opset1.lstm_sequence(
parameter_X,
parameter_H_t,
parameter_C_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
)
assert node_default.get_type_name() == "LSTMSequence"
assert node_default.get_output_size() == 3
activations = ["RELU", "tanh", "Sigmoid"]
activation_alpha = [2.0]
activation_beta = [1.0]
clip = 0.5
node = ng_opset1.lstm_sequence(
parameter_X,
parameter_H_t,
parameter_C_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
activations,
activation_alpha,
activation_beta,
clip,
)
assert node.get_type_name() == "LSTMSequence"
assert node.get_output_size() == 3
def test_gru_cell_operator():
    """Build GRUCell with defaults, then with full attributes and linear_before_reset."""
    batch_size, input_size, hidden_size = 1, 16, 128
    expected_shape = [batch_size, hidden_size]

    parameter_X = ng.parameter([batch_size, input_size], name="X", dtype=np.float32)
    parameter_H_t = ng.parameter([batch_size, hidden_size], name="H_t", dtype=np.float32)
    parameter_W = ng.parameter([3 * hidden_size, input_size], name="W", dtype=np.float32)
    parameter_R = ng.parameter([3 * hidden_size, hidden_size], name="R", dtype=np.float32)
    parameter_B = ng.parameter([3 * hidden_size], name="B", dtype=np.float32)

    node_default = ng.gru_cell(parameter_X, parameter_H_t, parameter_W, parameter_R, parameter_B, hidden_size)
    assert node_default.get_type_name() == "GRUCell"
    assert node_default.get_output_size() == 1
    assert list(node_default.get_output_shape(0)) == expected_shape

    # With linear_before_reset=True the bias tensor must grow to [4 * hidden_size].
    parameter_B = ng.parameter([4 * hidden_size], name="B", dtype=np.float32)
    node_param = ng.gru_cell(
        parameter_X, parameter_H_t, parameter_W, parameter_R, parameter_B,
        hidden_size, ["tanh", "relu"], [1.0, 2.0], [1.0, 2.0], 0.5, True,
    )
    assert node_param.get_type_name() == "GRUCell"
    assert node_param.get_output_size() == 1
    assert list(node_param.get_output_shape(0)) == expected_shape
def test_gru_sequence():
    """Build forward GRUSequence with defaults, then with the full attribute set."""
    batch_size, seq_len, input_size, hidden_size = 2, 8, 16, 32
    num_directions = 1
    seq_lengths = [seq_len] * batch_size
    direction = "FORWARD"

    parameter_X = ng.parameter([batch_size, seq_len, input_size], name="X", dtype=np.float32)
    parameter_H_t = ng.parameter([batch_size, num_directions, hidden_size], name="H_t", dtype=np.float32)
    parameter_W = ng.parameter([num_directions, 3 * hidden_size, input_size], name="W", dtype=np.float32)
    parameter_R = ng.parameter([num_directions, 3 * hidden_size, hidden_size], name="R", dtype=np.float32)
    parameter_B = ng.parameter([num_directions, 3 * hidden_size], name="B", dtype=np.float32)
    expected_shape_y = [batch_size, num_directions, seq_len, hidden_size]
    expected_shape_h = [batch_size, num_directions, hidden_size]

    node_default = ng.gru_sequence(
        parameter_X, parameter_H_t, seq_lengths, parameter_W,
        parameter_R, parameter_B, hidden_size, direction,
    )
    assert node_default.get_type_name() == "GRUSequence"
    assert node_default.get_output_size() == 2
    assert list(node_default.get_output_shape(0)) == expected_shape_y
    assert list(node_default.get_output_shape(1)) == expected_shape_h

    # With linear_before_reset=True the bias needs shape [num_directions, 4 * hidden_size].
    parameter_B = ng.parameter([num_directions, 4 * hidden_size], name="B", dtype=np.float32)
    node_param = ng.gru_sequence(
        parameter_X, parameter_H_t, seq_lengths, parameter_W,
        parameter_R, parameter_B, hidden_size, direction,
        ["tanh", "relu"], [1.0, 2.0], [1.0, 2.0], 0.5, True,
    )
    assert node_param.get_type_name() == "GRUSequence"
    assert node_param.get_output_size() == 2
    assert list(node_param.get_output_shape(0)) == expected_shape_y
    assert list(node_param.get_output_shape(1)) == expected_shape_h
def test_rnn_sequence():
    """Build forward RNNSequence with defaults, then with explicit activations and clip."""
    batch_size, seq_len, input_size, hidden_size = 2, 8, 16, 32
    num_directions = 1
    seq_lengths = [seq_len] * batch_size
    direction = "FORWARD"

    parameter_X = ng.parameter([batch_size, seq_len, input_size], name="X", dtype=np.float32)
    parameter_H_t = ng.parameter([batch_size, num_directions, hidden_size], name="H_t", dtype=np.float32)
    parameter_W = ng.parameter([num_directions, hidden_size, input_size], name="W", dtype=np.float32)
    parameter_R = ng.parameter([num_directions, hidden_size, hidden_size], name="R", dtype=np.float32)
    parameter_B = ng.parameter([num_directions, hidden_size], name="B", dtype=np.float32)
    expected_shape_y = [batch_size, num_directions, seq_len, hidden_size]
    expected_shape_h = [batch_size, num_directions, hidden_size]

    node_default = ng.rnn_sequence(
        parameter_X, parameter_H_t, seq_lengths, parameter_W,
        parameter_R, parameter_B, hidden_size, direction,
    )
    assert node_default.get_type_name() == "RNNSequence"
    assert node_default.get_output_size() == 2
    assert list(node_default.get_output_shape(0)) == expected_shape_y
    assert list(node_default.get_output_shape(1)) == expected_shape_h

    # Explicit activation, alpha/beta scaling and clip threshold.
    node_param = ng.rnn_sequence(
        parameter_X, parameter_H_t, seq_lengths, parameter_W,
        parameter_R, parameter_B, hidden_size, direction,
        ["relu"], [2.0], [1.0], 0.5,
    )
    assert node_param.get_type_name() == "RNNSequence"
    assert node_param.get_output_size() == 2
    assert list(node_param.get_output_shape(0)) == expected_shape_y
    assert list(node_param.get_output_shape(1)) == expected_shape_h
@skip_segfault
def test_loop():
    """Construct a Loop node from a trip count and an execution condition."""
    node_default = ng.loop(8, True)
    assert node_default.get_type_name() == "Loop"
def test_roi_pooling():
    """ROIPooling over 150 ROIs producing 6x6 bins per channel with Max pooling."""
    inputs = ng.parameter([2, 3, 4, 5], dtype=np.float32)
    coords = ng.parameter([150, 5], dtype=np.float32)
    node = ng.roi_pooling(inputs, coords, [6, 6], 0.0625, "Max")
    assert node.get_type_name() == "ROIPooling"
    # BUG FIX: get_output_size() returns the number of node outputs (an int,
    # compared to 1/2/3 in every sibling test), not the pooled spatial shape.
    # The original compared it to [6, 6], which could never be true.
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [150, 3, 6, 6]
    assert node.get_output_element_type(0) == Type.f32
def test_psroi_pooling():
    """Position-sensitive ROI pooling over 150 boxes in Avg mode."""
    feature_map = ng.parameter([1, 3, 4, 5], dtype=np.float32)
    boxes = ng.parameter([150, 5], dtype=np.float32)
    output_dim, group_size, spatial_scale = 2, 6, 0.0625

    pooled = ng.psroi_pooling(feature_map, boxes, output_dim, group_size, spatial_scale, 0, 0, "Avg")

    assert pooled.get_type_name() == "PSROIPooling"
    assert pooled.get_output_size() == 1
    assert list(pooled.get_output_shape(0)) == [150, output_dim, group_size, group_size]
    assert pooled.get_output_element_type(0) == Type.f32
def test_convert_like():
    """ConvertLike casts data to the element type of the reference node."""
    source = ng.parameter([1, 2, 3, 4], name="data", dtype=np.float32)
    reference = ng.constant(1, dtype=np.int8)

    converted = ng.convert_like(source, reference)

    assert converted.get_type_name() == "ConvertLike"
    assert converted.get_output_size() == 1
    assert list(converted.get_output_shape(0)) == [1, 2, 3, 4]
    assert converted.get_output_element_type(0) == Type.i8
def test_bucketize():
    """Bucketize maps each input element to a bucket index, emitting i32 output."""
    values = ng.parameter([4, 3, 2, 1], name="data", dtype=np.float32)
    boundaries = ng.parameter([5], name="buckets", dtype=np.int64)

    bucketized = ng.bucketize(values, boundaries, "i32")

    assert bucketized.get_type_name() == "Bucketize"
    assert bucketized.get_output_size() == 1
    assert list(bucketized.get_output_shape(0)) == [4, 3, 2, 1]
    assert bucketized.get_output_element_type(0) == Type.i32
def test_region_yolo():
    """RegionYolo without softmax keeps (classes + coords + 1) * len(mask) channels."""
    feature_map = ng.parameter([1, 125, 13, 13], name="input", dtype=np.float32)
    num_coords, num_classes, num_regions = 4, 80, 1
    mask = [6, 7, 8]
    axis, end_axis = 0, 3
    do_softmax = False

    node = ng.region_yolo(feature_map, num_coords, num_classes, num_regions, do_softmax, mask, axis, end_axis)

    assert node.get_type_name() == "RegionYolo"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [1, (num_classes + num_coords + 1) * len(mask), 13, 13]
    assert node.get_output_element_type(0) == Type.f32
def test_reorg_yolo():
    """ReorgYolo with stride 2 folds spatial blocks into the channel axis."""
    feature_map = ng.parameter([2, 24, 34, 62], name="input", dtype=np.int32)

    node = ng.reorg_yolo(feature_map, [2])

    assert node.get_type_name() == "ReorgYolo"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [2, 96, 17, 31]
    assert node.get_output_element_type(0) == Type.i32
def test_embedding_bag_offsets_sum_1():
    """EmbeddingBagOffsetsSum with a default index: one output row per offset."""
    table = ng.parameter([5, 2], name="emb_table", dtype=np.float32)
    idx = ng.parameter([4], name="indices", dtype=np.int64)
    offs = ng.parameter([3], name="offsets", dtype=np.int64)
    default_idx = ng.parameter([], name="default_index", dtype=np.int64)

    node = ng.embedding_bag_offsets_sum(table, idx, offs, default_idx)

    assert node.get_type_name() == "EmbeddingBagOffsetsSum"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [3, 2]
    assert node.get_output_element_type(0) == Type.f32
def test_embedding_segments_sum_all_inputs():
    """EmbeddingSegmentsSum with every optional input supplied."""
    table = ng.parameter([5, 2], name="emb_table", dtype=np.float32)
    idx = ng.parameter([4], name="indices", dtype=np.int64)
    seg_ids = ng.parameter([4], name="segment_ids", dtype=np.int64)
    seg_count = ng.parameter([], name="num_segments", dtype=np.int64)
    default_idx = ng.parameter([], name="default_index", dtype=np.int64)
    weights = ng.parameter([4], name="per_sample_weights", dtype=np.float32)

    node = ng.embedding_segments_sum(table, idx, seg_ids, seg_count, default_idx, weights)

    # Row count depends on the runtime num_segments scalar, so only a
    # partial shape [-1, 2] can be checked statically.
    assert node.get_type_name() == "EmbeddingSegmentsSum"
    assert node.get_output_size() == 1
    assert node.get_output_partial_shape(0).same_scheme(PartialShape([-1, 2]))
    assert node.get_output_element_type(0) == Type.f32
def test_embedding_segments_sum_with_some_opt_inputs():
    """EmbeddingSegmentsSum supplying only num_segments of the three optional inputs."""
    table = ng.parameter([5, 2], name="emb_table", dtype=np.float32)
    idx = ng.parameter([4], name="indices", dtype=np.int64)
    seg_ids = ng.parameter([4], name="segment_ids", dtype=np.int64)
    seg_count = ng.parameter([], name="num_segments", dtype=np.int64)

    node = ng.embedding_segments_sum(table, idx, seg_ids, seg_count)

    # Row count depends on the runtime num_segments scalar, hence partial shape.
    assert node.get_type_name() == "EmbeddingSegmentsSum"
    assert node.get_output_size() == 1
    assert node.get_output_partial_shape(0).same_scheme(PartialShape([-1, 2]))
    assert node.get_output_element_type(0) == Type.f32
def test_embedding_bag_packed_sum():
    """EmbeddingBagPackedSum with per-sample weights (its only optional input)."""
    table = ng.parameter([5, 2], name="emb_table", dtype=np.float32)
    idx = ng.parameter([3, 3], name="indices", dtype=np.int64)
    weights = ng.parameter([3, 3], name="per_sample_weights", dtype=np.float32)

    node = ng.embedding_bag_packed_sum(table, idx, weights)

    assert node.get_type_name() == "EmbeddingBagPackedSum"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [3, 2]
    assert node.get_output_element_type(0) == Type.f32
@pytest.mark.parametrize("dtype", integral_np_types)
def test_interpolate(dtype):
image_shape = [1, 3, 1024, 1024]
output_shape = [64, 64]
attributes = {
"axes": [2, 3],
"mode": "cubic",
"pads_begin": np.array([2, 2], dtype=dtype),
}
image_node = ng.parameter(image_shape, dtype, name="Image")
node = ng.interpolate(image_node, output_shape, attributes)
expected_shape = [1, 3, 64, 64]
assert node.get_type_name() == "Interpolate"
assert node.get_output_size() == 1
assert list(node.get_output_shape(0)) == expected_shape
@pytest.mark.parametrize(
    "int_dtype, fp_dtype",
    [
        (np.int8, np.float32),
        (np.int16, np.float32),
        (np.int32, np.float32),
        (np.int64, np.float32),
        (np.uint8, np.float32),
        (np.uint16, np.float32),
        (np.uint32, np.float32),
        (np.uint64, np.float32),
        (np.int32, np.float16),
        (np.int32, np.float64),
    ],
)
def test_prior_box(int_dtype, fp_dtype):
    """PriorBox over a 32x32 feature grid against a 64x64 image."""
    layer_shape = ng.constant(np.array([32, 32], dtype=int_dtype), int_dtype)
    image_shape = np.array([64, 64], dtype=int_dtype)
    attributes = {
        "offset": fp_dtype(0),
        "min_size": np.array([2, 3], dtype=fp_dtype),
        "aspect_ratio": np.array([1.5, 2.0, 2.5], dtype=fp_dtype),
        "scale_all_sizes": False,
    }

    node = ng.prior_box(layer_shape, image_shape, attributes)

    assert node.get_type_name() == "PriorBox"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [2, 20480]
@pytest.mark.parametrize(
    "int_dtype, fp_dtype",
    [
        (np.int8, np.float32),
        (np.int16, np.float32),
        (np.int32, np.float32),
        (np.int64, np.float32),
        (np.uint8, np.float32),
        (np.uint16, np.float32),
        (np.uint32, np.float32),
        (np.uint64, np.float32),
        (np.int32, np.float16),
        (np.int32, np.float64),
    ],
)
def test_prior_box_clustered(int_dtype, fp_dtype):
    """PriorBoxClustered with three explicit box sizes on a 19x19 grid."""
    output_size = ng.constant(np.array([19, 19], dtype=int_dtype), int_dtype)
    image_size = np.array([64, 64], dtype=int_dtype)
    attributes = {
        "offset": fp_dtype(0.5),
        "width": np.array([4.0, 2.0, 3.2], dtype=fp_dtype),
        "height": np.array([1.0, 2.0, 1.0], dtype=fp_dtype),
    }

    node = ng.prior_box_clustered(output_size, image_size, attributes)

    assert node.get_type_name() == "PriorBoxClustered"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [2, 4332]
@pytest.mark.parametrize(
    "int_dtype, fp_dtype",
    [
        (np.int8, np.float32),
        (np.int16, np.float32),
        (np.int32, np.float32),
        (np.int64, np.float32),
        (np.uint8, np.float32),
        (np.uint16, np.float32),
        (np.uint32, np.float32),
        (np.uint64, np.float32),
        (np.int32, np.float16),
        (np.int32, np.float64),
    ],
)
def test_detection_output(int_dtype, fp_dtype):
    """DetectionOutput with both auxiliary prediction inputs supplied."""
    attributes = {
        "num_classes": int_dtype(85),
        "keep_top_k": np.array([64], dtype=int_dtype),
        "nms_threshold": fp_dtype(0.645),
    }
    box_logits = ng.parameter([4, 1, 5, 5], fp_dtype, "box_logits")
    class_preds = ng.parameter([2, 1, 4, 5], fp_dtype, "class_preds")
    proposals = ng.parameter([2, 1, 4, 5], fp_dtype, "proposals")
    aux_class_preds = ng.parameter([2, 1, 4, 5], fp_dtype, "aux_class_preds")
    aux_box_preds = ng.parameter([2, 1, 4, 5], fp_dtype, "aux_box_preds")

    node = ng.detection_output(box_logits, class_preds, proposals, attributes, aux_class_preds, aux_box_preds)

    assert node.get_type_name() == "DetectionOutput"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [1, 1, 256, 7]
@pytest.mark.parametrize(
    "int_dtype, fp_dtype",
    [
        (np.uint8, np.float32),
        (np.uint16, np.float32),
        (np.uint32, np.float32),
        (np.uint64, np.float32),
        (np.uint32, np.float16),
        (np.uint32, np.float64),
    ],
)
def test_proposal(int_dtype, fp_dtype):
    """Proposal yields [batch * post_nms_topn, 5] ROIs plus a second output."""
    batch_size = 7
    attributes = {
        "base_size": int_dtype(1),
        "pre_nms_topn": int_dtype(20),
        "post_nms_topn": int_dtype(64),
        "nms_thresh": fp_dtype(0.34),
        "feat_stride": int_dtype(16),
        "min_size": int_dtype(32),
        "ratio": np.array([0.1, 1.5, 2.0, 2.5], dtype=fp_dtype),
        "scale": np.array([2, 3, 3, 4], dtype=fp_dtype),
    }
    class_probs = ng.parameter([batch_size, 12, 34, 62], fp_dtype, "class_probs")
    bbox_deltas = ng.parameter([batch_size, 24, 34, 62], fp_dtype, "bbox_deltas")
    image_shape = ng.parameter([3], fp_dtype, "image_shape")

    node = ng.proposal(class_probs, bbox_deltas, image_shape, attributes)

    assert node.get_type_name() == "Proposal"
    assert node.get_output_size() == 2
    assert list(node.get_output_shape(0)) == [batch_size * attributes["post_nms_topn"], 5]
def test_tensor_iterator():
    """Build a TensorIterator computing a cumulative moving average (CMA)
    over 16 time steps and verify its two output shapes:
    the final [2, 2] average and the concatenated [16, 2, 2] CMA history.
    """
    from ngraph.utils.tensor_iterator_types import (
        GraphBody,
        TensorIteratorSliceInputDesc,
        TensorIteratorMergedInputDesc,
        TensorIteratorInvariantInputDesc,
        TensorIteratorBodyOutputDesc,
        TensorIteratorConcatOutputDesc,
    )
    # Body parameters
    body_timestep = ng.parameter([], np.int32, "timestep")
    body_data_in = ng.parameter([1, 2, 2], np.float32, "body_in")
    body_prev_cma = ng.parameter([2, 2], np.float32, "body_prev_cma")
    body_const_one = ng.parameter([], np.int32, "body_const_one")
    # CMA = cumulative moving average:
    #   curr_cma = (timestep * prev_cma + data[t]) / (timestep + 1)
    prev_cum_sum = ng.multiply(ng.convert(body_timestep, "f32"), body_prev_cma)
    curr_cum_sum = ng.add(prev_cum_sum, ng.squeeze(body_data_in, [0]))
    elem_cnt = ng.add(body_const_one, body_timestep)
    curr_cma = ng.divide(curr_cum_sum, ng.convert(elem_cnt, "f32"))
    # Restore the leading axis so per-iteration results can be concatenated.
    cma_hist = ng.unsqueeze(curr_cma, [0])
    # TI inputs
    data = ng.parameter([16, 2, 2], np.float32, "data")
    # Iterations count
    zero = ng.constant(0, dtype=np.int32)
    one = ng.constant(1, dtype=np.int32)
    initial_cma = ng.constant(np.zeros([2, 2], dtype=np.float32), dtype=np.float32)
    iter_cnt = ng.range(zero, np.int32(16), np.int32(1))
    ti_inputs = [iter_cnt, data, initial_cma, one]
    # Body graph: four parameters in, two results out (final CMA, history slice).
    graph_body = GraphBody([body_timestep, body_data_in, body_prev_cma, body_const_one], [curr_cma, cma_hist])
    # Sliced inputs: one slice along axis 0 is fed to the body per iteration.
    ti_slice_input_desc = [
        # timestep
        # input_idx, body_param_idx, start, stride, part_size, end, axis
        TensorIteratorSliceInputDesc(0, 0, 0, 1, 1, -1, 0),
        # data
        TensorIteratorSliceInputDesc(1, 1, 0, 1, 1, -1, 0),
    ]
    # Merged input: body output 0 (curr_cma) becomes body param 2
    # (body_prev_cma) on the next iteration.
    ti_merged_input_desc = [
        # body prev/curr_cma
        TensorIteratorMergedInputDesc(2, 2, 0),
    ]
    # Invariant input: fed to the body unchanged on every iteration.
    ti_invariant_input_desc = [
        # body const one
        TensorIteratorInvariantInputDesc(3, 3),
    ]
    # TI outputs
    ti_body_output_desc = [
        # final average: body result 0, taken from the last iteration (-1)
        TensorIteratorBodyOutputDesc(0, 0, -1),
    ]
    ti_concat_output_desc = [
        # history of cma: body result 1, concatenated along axis 0
        TensorIteratorConcatOutputDesc(1, 1, 0, 1, 1, -1, 0),
    ]
    node = ng.tensor_iterator(
        ti_inputs,
        graph_body,
        ti_slice_input_desc,
        ti_merged_input_desc,
        ti_invariant_input_desc,
        ti_body_output_desc,
        ti_concat_output_desc,
    )
    assert node.get_type_name() == "TensorIterator"
    assert node.get_output_size() == 2
    # final average
    assert list(node.get_output_shape(0)) == [2, 2]
    # cma history
    assert list(node.get_output_shape(1)) == [16, 2, 2]
def test_read_value():
    """ReadValue produces an output matching its initializer's shape and type."""
    initializer = ng.parameter([2, 2], name="init_value", dtype=np.int32)

    node = ng.read_value(initializer, "var_id_667")

    assert node.get_type_name() == "ReadValue"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [2, 2]
    assert node.get_output_element_type(0) == Type.i32
def test_assign():
    """Assign writes back to the variable produced by a ReadValue node."""
    source = ng.parameter([5, 7], name="input_data", dtype=np.int32)
    variable = ng.read_value(source, "var_id_667")

    node = ng.assign(variable, "var_id_667")

    assert node.get_type_name() == "Assign"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [5, 7]
    assert node.get_output_element_type(0) == Type.i32
def test_extract_image_patches():
    """ExtractImagePatches: 3x3 patches at stride 5 from 10x10 images, VALID padding."""
    image = ng.parameter([64, 3, 10, 10], name="image", dtype=np.int32)

    node = ng.extract_image_patches(image, [3, 3], [5, 5], [1, 1], "VALID")

    assert node.get_type_name() == "ExtractImagePatches"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [64, 27, 2, 2]
    assert node.get_output_element_type(0) == Type.i32
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_lstm_sequence_operator_bidirectional(dtype):
batch_size = 1
input_size = 16
hidden_size = 128
num_directions = 2
seq_length = 2
X_shape = [batch_size, seq_length, input_size]
H_t_shape = [batch_size, num_directions, hidden_size]
C_t_shape = [batch_size, num_directions, hidden_size]
seq_len_shape = [batch_size]
W_shape = [num_directions, 4 * hidden_size, input_size]
R_shape = [num_directions, 4 * hidden_size, hidden_size]
B_shape = [num_directions, 4 * hidden_size]
parameter_X = ng.parameter(X_shape, name="X", dtype=dtype)
parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=dtype)
parameter_C_t = ng.parameter(C_t_shape, name="C_t", dtype=dtype)
parameter_seq_len = ng.parameter(seq_len_shape, name="seq_len", dtype=np.int32)
parameter_W = ng.parameter(W_shape, name="W", dtype=dtype)
parameter_R = ng.parameter(R_shape, name="R", dtype=dtype)
parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
direction = "BIDIRECTIONAL"
node = ng.lstm_sequence(
parameter_X,
parameter_H_t,
parameter_C_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
)
assert node.get_type_name() == "LSTMSequence"
assert node.get_output_size() == 3
activations = ["RELU", "tanh", "Sigmoid"]
activation_alpha = [1.0, 2.0, 3.0]
activation_beta = [3.0, 2.0, 1.0]
clip = 1.22
node_param = ng.lstm_sequence(
parameter_X,
parameter_H_t,
parameter_C_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
activations,
activation_alpha,
activation_beta,
clip,
)
assert node_param.get_type_name() == "LSTMSequence"
assert node_param.get_output_size() == 3
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_lstm_sequence_operator_reverse(dtype):
batch_size = 2
input_size = 4
hidden_size = 3
num_directions = 1
seq_length = 2
X_shape = [batch_size, seq_length, input_size]
H_t_shape = [batch_size, num_directions, hidden_size]
C_t_shape = [batch_size, num_directions, hidden_size]
seq_len_shape = [batch_size]
W_shape = [num_directions, 4 * hidden_size, input_size]
R_shape = [num_directions, 4 * hidden_size, hidden_size]
B_shape = [num_directions, 4 * hidden_size]
parameter_X = ng.parameter(X_shape, name="X", dtype=dtype)
parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=dtype)
parameter_C_t = ng.parameter(C_t_shape, name="C_t", dtype=dtype)
parameter_seq_len = ng.parameter(seq_len_shape, name="seq_len", dtype=np.int32)
parameter_W = ng.parameter(W_shape, name="W", dtype=dtype)
parameter_R = ng.parameter(R_shape, name="R", dtype=dtype)
parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
direction = "REVERSE"
node_default = ng.lstm_sequence(
parameter_X,
parameter_H_t,
parameter_C_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
)
assert node_default.get_type_name() == "LSTMSequence"
assert node_default.get_output_size() == 3
activations = ["RELU", "tanh", "Sigmoid"]
activation_alpha = [1.0, 2.0, 3.0]
activation_beta = [3.0, 2.0, 1.0]
clip = 1.22
node_param = ng.lstm_sequence(
parameter_X,
parameter_H_t,
parameter_C_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
activations,
activation_alpha,
activation_beta,
clip,
)
assert node_param.get_type_name() == "LSTMSequence"
assert node_param.get_output_size() == 3
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_lstm_sequence_operator_forward(dtype):
batch_size = 2
input_size = 4
hidden_size = 3
num_directions = 1
seq_length = 2
X_shape = [batch_size, seq_length, input_size]
H_t_shape = [batch_size, num_directions, hidden_size]
C_t_shape = [batch_size, num_directions, hidden_size]
seq_len_shape = [batch_size]
W_shape = [num_directions, 4 * hidden_size, input_size]
R_shape = [num_directions, 4 * hidden_size, hidden_size]
B_shape = [num_directions, 4 * hidden_size]
parameter_X = ng.parameter(X_shape, name="X", dtype=dtype)
parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=dtype)
parameter_C_t = ng.parameter(C_t_shape, name="C_t", dtype=dtype)
parameter_seq_len = ng.parameter(seq_len_shape, name="seq_len", dtype=np.int32)
parameter_W = ng.parameter(W_shape, name="W", dtype=dtype)
parameter_R = ng.parameter(R_shape, name="R", dtype=dtype)
parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
direction = "forward"
node_default = ng.lstm_sequence(
parameter_X,
parameter_H_t,
parameter_C_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
)
assert node_default.get_type_name() == "LSTMSequence"
assert node_default.get_output_size() == 3
activations = ["RELU", "tanh", "Sigmoid"]
activation_alpha = [2.0]
activation_beta = [1.0]
clip = 0.5
node = ng.lstm_sequence(
parameter_X,
parameter_H_t,
parameter_C_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
activations,
activation_alpha,
activation_beta,
clip,
)
assert node.get_type_name() == "LSTMSequence"
assert node.get_output_size() == 3
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_gru_sequence_operator_bidirectional(dtype):
batch_size = 1
input_size = 16
hidden_size = 128
num_directions = 2
seq_length = 2
X_shape = [batch_size, seq_length, input_size]
H_t_shape = [batch_size, num_directions, hidden_size]
seq_len_shape = [batch_size]
W_shape = [num_directions, 3 * hidden_size, input_size]
R_shape = [num_directions, 3 * hidden_size, hidden_size]
B_shape = [num_directions, 3 * hidden_size]
parameter_X = ng.parameter(X_shape, name="X", dtype=dtype)
parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=dtype)
parameter_seq_len = ng.parameter(seq_len_shape, name="seq_len", dtype=np.int32)
parameter_W = ng.parameter(W_shape, name="W", dtype=dtype)
parameter_R = ng.parameter(R_shape, name="R", dtype=dtype)
parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
direction = "BIDIRECTIONAL"
node = ng.gru_sequence(
parameter_X,
parameter_H_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
)
assert node.get_type_name() == "GRUSequence"
assert node.get_output_size() == 2
activations = ["RELU", "tanh"]
activation_alpha = [1.0, 2.0, 3.0]
activation_beta = [3.0, 2.0, 1.0]
clip = 1.22
linear_before_reset = True
B_shape = [num_directions, 4 * hidden_size]
parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
node_param = ng.gru_sequence(
parameter_X,
parameter_H_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
activations,
activation_alpha,
activation_beta,
clip,
linear_before_reset
)
assert node_param.get_type_name() == "GRUSequence"
assert node_param.get_output_size() == 2
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_gru_sequence_operator_reverse(dtype):
batch_size = 2
input_size = 4
hidden_size = 3
num_directions = 1
seq_length = 2
X_shape = [batch_size, seq_length, input_size]
H_t_shape = [batch_size, num_directions, hidden_size]
seq_len_shape = [batch_size]
W_shape = [num_directions, 3 * hidden_size, input_size]
R_shape = [num_directions, 3 * hidden_size, hidden_size]
B_shape = [num_directions, 3 * hidden_size]
parameter_X = ng.parameter(X_shape, name="X", dtype=dtype)
parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=dtype)
parameter_seq_len = ng.parameter(seq_len_shape, name="seq_len", dtype=np.int32)
parameter_W = ng.parameter(W_shape, name="W", dtype=dtype)
parameter_R = ng.parameter(R_shape, name="R", dtype=dtype)
parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
direction = "REVERSE"
node_default = ng.gru_sequence(
parameter_X,
parameter_H_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
)
assert node_default.get_type_name() == "GRUSequence"
assert node_default.get_output_size() == 2
activations = ["RELU", "tanh"]
activation_alpha = [1.0, 2.0, 3.0]
activation_beta = [3.0, 2.0, 1.0]
clip = 1.22
linear_before_reset = True
B_shape = [num_directions, 4 * hidden_size]
parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
node_param = ng.gru_sequence(
parameter_X,
parameter_H_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
activations,
activation_alpha,
activation_beta,
clip,
linear_before_reset
)
assert node_param.get_type_name() == "GRUSequence"
assert node_param.get_output_size() == 2
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_gru_sequence_operator_forward(dtype):
batch_size = 2
input_size = 4
hidden_size = 3
num_directions = 1
seq_length = 2
X_shape = [batch_size, seq_length, input_size]
H_t_shape = [batch_size, num_directions, hidden_size]
seq_len_shape = [batch_size]
W_shape = [num_directions, 3 * hidden_size, input_size]
R_shape = [num_directions, 3 * hidden_size, hidden_size]
B_shape = [num_directions, 3 * hidden_size]
parameter_X = ng.parameter(X_shape, name="X", dtype=dtype)
parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=dtype)
parameter_seq_len = ng.parameter(seq_len_shape, name="seq_len", dtype=np.int32)
parameter_W = ng.parameter(W_shape, name="W", dtype=dtype)
parameter_R = ng.parameter(R_shape, name="R", dtype=dtype)
parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
direction = "forward"
node_default = ng.gru_sequence(
parameter_X,
parameter_H_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
)
assert node_default.get_type_name() == "GRUSequence"
assert node_default.get_output_size() == 2
activations = ["RELU", "tanh"]
activation_alpha = [2.0]
activation_beta = [1.0]
clip = 0.5
linear_before_reset = True
B_shape = [num_directions, 4 * hidden_size]
parameter_B = ng.parameter(B_shape, name="B", dtype=dtype)
node = ng.gru_sequence(
parameter_X,
parameter_H_t,
parameter_seq_len,
parameter_W,
parameter_R,
parameter_B,
hidden_size,
direction,
activations,
activation_alpha,
activation_beta,
clip,
linear_before_reset
)
assert node.get_type_name() == "GRUSequence"
assert node.get_output_size() == 2
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_rnn_sequence_operator_bidirectional(dtype):
    """RNNSequence, BIDIRECTIONAL direction: default and parameterized forms."""
    batch_size, seq_length = 1, 2
    input_size, hidden_size = 16, 128
    num_directions = 2
    direction = "BIDIRECTIONAL"

    # Sequence inputs in the order rnn_sequence expects: X, H_t, seq_len, W, R, B.
    inputs = [
        ng.parameter([batch_size, seq_length, input_size], name="X", dtype=dtype),
        ng.parameter([batch_size, num_directions, hidden_size], name="H_t", dtype=dtype),
        ng.parameter([batch_size], name="seq_len", dtype=np.int32),
        ng.parameter([num_directions, hidden_size, input_size], name="W", dtype=dtype),
        ng.parameter([num_directions, hidden_size, hidden_size], name="R", dtype=dtype),
        ng.parameter([num_directions, hidden_size], name="B", dtype=dtype),
    ]

    node = ng.rnn_sequence(*inputs, hidden_size, direction)
    assert node.get_type_name() == "RNNSequence"
    assert node.get_output_size() == 2

    # Explicit activation / clip attributes.
    node_param = ng.rnn_sequence(
        *inputs,
        hidden_size,
        direction,
        ["RELU", "tanh"],    # activations
        [1.0, 2.0, 3.0],     # activation_alpha
        [3.0, 2.0, 1.0],     # activation_beta
        1.22,                # clip
    )
    assert node_param.get_type_name() == "RNNSequence"
    assert node_param.get_output_size() == 2
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_rnn_sequence_operator_reverse(dtype):
    """RNNSequence, REVERSE direction: default and parameterized forms."""
    batch_size, seq_length = 2, 2
    input_size, hidden_size = 4, 3
    num_directions = 1
    direction = "REVERSE"

    # Sequence inputs in the order rnn_sequence expects: X, H_t, seq_len, W, R, B.
    inputs = [
        ng.parameter([batch_size, seq_length, input_size], name="X", dtype=dtype),
        ng.parameter([batch_size, num_directions, hidden_size], name="H_t", dtype=dtype),
        ng.parameter([batch_size], name="seq_len", dtype=np.int32),
        ng.parameter([num_directions, hidden_size, input_size], name="W", dtype=dtype),
        ng.parameter([num_directions, hidden_size, hidden_size], name="R", dtype=dtype),
        ng.parameter([num_directions, hidden_size], name="B", dtype=dtype),
    ]

    node_default = ng.rnn_sequence(*inputs, hidden_size, direction)
    assert node_default.get_type_name() == "RNNSequence"
    assert node_default.get_output_size() == 2

    # Explicit activation / clip attributes.
    node_param = ng.rnn_sequence(
        *inputs,
        hidden_size,
        direction,
        ["RELU", "tanh"],    # activations
        [1.0, 2.0, 3.0],     # activation_alpha
        [3.0, 2.0, 1.0],     # activation_beta
        1.22,                # clip
    )
    assert node_param.get_type_name() == "RNNSequence"
    assert node_param.get_output_size() == 2
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_rnn_sequence_operator_forward(dtype):
    """RNNSequence, forward direction: default and parameterized forms."""
    batch_size, seq_length = 2, 2
    input_size, hidden_size = 4, 3
    num_directions = 1
    direction = "forward"

    # Sequence inputs in the order rnn_sequence expects: X, H_t, seq_len, W, R, B.
    inputs = [
        ng.parameter([batch_size, seq_length, input_size], name="X", dtype=dtype),
        ng.parameter([batch_size, num_directions, hidden_size], name="H_t", dtype=dtype),
        ng.parameter([batch_size], name="seq_len", dtype=np.int32),
        ng.parameter([num_directions, hidden_size, input_size], name="W", dtype=dtype),
        ng.parameter([num_directions, hidden_size, hidden_size], name="R", dtype=dtype),
        ng.parameter([num_directions, hidden_size], name="B", dtype=dtype),
    ]

    node_default = ng.rnn_sequence(*inputs, hidden_size, direction)
    assert node_default.get_type_name() == "RNNSequence"
    assert node_default.get_output_size() == 2

    # Explicit activation / clip attributes.
    node = ng.rnn_sequence(
        *inputs,
        hidden_size,
        direction,
        ["RELU", "tanh"],    # activations
        [2.0],               # activation_alpha
        [1.0],               # activation_beta
        0.5,                 # clip
    )
    assert node.get_type_name() == "RNNSequence"
    assert node.get_output_size() == 2
| 31.701555
| 110
| 0.660108
| 7,394
| 53,005
| 4.413714
| 0.054909
| 0.056963
| 0.032266
| 0.027945
| 0.831776
| 0.811981
| 0.798131
| 0.791236
| 0.777723
| 0.771503
| 0
| 0.035323
| 0.218602
| 53,005
| 1,671
| 111
| 31.720527
| 0.75262
| 0.022677
| 0
| 0.736842
| 0
| 0
| 0.042265
| 0.001661
| 0
| 0
| 0
| 0
| 0.130467
| 1
| 0.031875
| false
| 0
| 0.00593
| 0
| 0.037806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7cb91ef62b6d238b31353a27cb8d256ad0c727ff
| 4,445
|
py
|
Python
|
notebooks/planning/helpers.py
|
janiapurv/swarm-reinforcement-learning
|
118c952f96e0c6cc093996fa1c76e7cc8a544bad
|
[
"MIT"
] | null | null | null |
notebooks/planning/helpers.py
|
janiapurv/swarm-reinforcement-learning
|
118c952f96e0c6cc093996fa1c76e7cc8a544bad
|
[
"MIT"
] | null | null | null |
notebooks/planning/helpers.py
|
janiapurv/swarm-reinforcement-learning
|
118c952f96e0c6cc093996fa1c76e7cc8a544bad
|
[
"MIT"
] | null | null | null |
"""
Helper methods and variables
============================
"""
import plotly.graph_objs as go
def obstacle_generator(obstacle_map):
    """Mark eight cubic obstacles in a 3-D occupancy grid.

    The cubes occupy index ranges 20..40 and 60..80 along every axis, in
    all eight low/high combinations (i.e. slices [20:41] and [60:81]).

    Args:
        obstacle_map: Numpy array of zeros of shape at least 100x100x100.

    Returns:
        The same array with obstacle cells set to 1 (modified in place).
    """
    # Bug fix: the original wrote 0 into an all-zero map -- a no-op -- even
    # though the docstring promises obstacles "marked as 1".
    spans = (slice(20, 41), slice(60, 81))
    for si in spans:
        for sj in spans:
            for sk in spans:
                obstacle_map[si, sj, sk] = 1
    return obstacle_map
# Cube triangulation shared by every obstacle mesh: i/j/k list the vertex
# indices of the 12 triangles covering the 6 faces of a box.
_TRI_I = [7, 0, 0, 0, 4, 4, 6, 6, 4, 0, 3, 2]
_TRI_J = [3, 4, 1, 2, 5, 6, 5, 2, 0, 1, 6, 3]
_TRI_K = [0, 7, 2, 3, 6, 7, 1, 1, 5, 5, 7, 6]


def _cube_mesh(x0, y0, z0, side=20):
    """Return a purple plotly Mesh3d cube with its lowest corner at (x0, y0, z0)."""
    x1, y1, z1 = x0 + side, y0 + side, z0 + side
    return go.Mesh3d(x=[x0, x0, x1, x1, x0, x0, x1, x1],
                     y=[y0, y1, y1, y0, y0, y1, y1, y0],
                     z=[z0, z0, z0, z0, z1, z1, z1, z1],
                     i=list(_TRI_I), j=list(_TRI_J), k=list(_TRI_K),
                     color='purple')


# One mesh per obstacle cube, matching the regions set by obstacle_generator.
obs = _cube_mesh(20, 20, 20)
obs1 = _cube_mesh(60, 20, 20)
obs2 = _cube_mesh(20, 20, 60)
obs3 = _cube_mesh(60, 20, 60)
obs4 = _cube_mesh(60, 60, 20)
obs5 = _cube_mesh(20, 60, 20)
obs6 = _cube_mesh(60, 60, 60)
obs7 = _cube_mesh(20, 60, 60)
meshes = [obs, obs1, obs2, obs3, obs4, obs5, obs6, obs7]
| 38.991228
| 71
| 0.375028
| 814
| 4,445
| 2.031941
| 0.081081
| 0.067715
| 0.072551
| 0.077388
| 0.817412
| 0.817412
| 0.817412
| 0.817412
| 0.817412
| 0.817412
| 0
| 0.316119
| 0.424972
| 4,445
| 113
| 72
| 39.336283
| 0.330986
| 0.052418
| 0
| 0.869565
| 1
| 0
| 0.011525
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01087
| false
| 0
| 0.01087
| 0
| 0.032609
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
7cdc9614c25474564f4623de22ded062fa0e684a
| 31,034
|
py
|
Python
|
mridul_mtech_project.py
|
shaktimukker/bacterial-growth-analysis
|
8288745ec8c069ca1e91c60dcccd5dbdddaf510e
|
[
"MIT"
] | null | null | null |
mridul_mtech_project.py
|
shaktimukker/bacterial-growth-analysis
|
8288745ec8c069ca1e91c60dcccd5dbdddaf510e
|
[
"MIT"
] | null | null | null |
mridul_mtech_project.py
|
shaktimukker/bacterial-growth-analysis
|
8288745ec8c069ca1e91c60dcccd5dbdddaf510e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sat May 29 16:31:46 2021
@author: shakt
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from gekko import GEKKO
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
# Project root on the author's machine; data.xlsx holds the experimental
# measurements fitted below (default sheet: Y, T, Um columns).
path=r"/Users/shaktimukker/Documents/MyProjects/projects/bacterial-growth-analysis/"
data=pd.read_excel(path+"data.xlsx")
# eq8
def fn_eq8(X, a1, a2, b0, b1, b2):
    """Growth-rate surface, eq. 8: squared temperature factor x moisture quadratic.

    X[:, 0] is moisture (Y); X[:, 1] is temperature (T); Tmin/Tmax fixed at 18/45.
    """
    moisture = X[:, 0]
    temp = X[:, 1]
    t_min, t_max = 18, 45
    temp_factor = (a1 * (temp - t_min) * (1 - np.exp(a2 * (temp - t_max)))) ** 2
    moisture_poly = b0 + b1 * moisture + b2 * moisture ** 2
    return temp_factor * moisture_poly
# eq9
def fn_eq9(X, a1, a2, b0, b1, b2):
    """Growth-rate surface, eq. 9.

    Unlike eq. 8, only the (1 - exp(...)) factor is squared here.
    X[:, 0] is moisture (Y); X[:, 1] is temperature (T); Tmin/Tmax fixed at 18/45.
    """
    moisture = X[:, 0]
    temp = X[:, 1]
    t_min, t_max = 18, 45
    decay_sq = (1 - np.exp(a2 * (temp - t_max))) ** 2
    moisture_poly = b0 + b1 * moisture + b2 * moisture ** 2
    return a1 * (temp - t_min) * decay_sq * moisture_poly
# Two candidate initial guesses for (a1, a2, b0, b1, b2); the second one
# deliberately overrides the first.
guesses = [2.981, 0.201, 0.431, 2.010, 0.528]
guesses = [2.189, 0.284, 0.418, 1.220, 0.867]
X = np.array(data[["Y", "T"]].values.tolist())
f = data["Um"].values.tolist()
popt, pcov = curve_fit(fn_eq9, X, f, guesses)
sigma = np.sqrt(np.diag(pcov))  # one-standard-deviation parameter errors
# plot
# Bug fix: np.column_stack takes a single tuple of arrays; the original
# passed two positional arguments, which raises TypeError.
X = np.column_stack((X[:, 0], X[:, 1]))  # independent variables
# NOTE(review): plotted with fn_eq8 although popt was fitted with fn_eq9 -- confirm.
f = fn_eq8(X, *popt)
fig = plt.figure()
ax = fig.gca(projection='3d')  # NOTE(review): removed in matplotlib>=3.6; use fig.add_subplot(projection='3d')
ax.plot(X[:, 0], X[:, 1], f)
ax.set_xlabel('Y')
ax.set_ylabel('T')
ax.set_zlabel('f(Y,T)')
plt.savefig('images/graphical-mulvar-1.png')
### pyomo tutorial
# Make the project root importable so that `import C` (constants module) works.
path=r"/Users/shaktimukker/Documents/MyProjects/projects/bacterial-growth-analysis/"
import sys
sys.path.append(path)
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d.axes3d import Axes3D
import shutil
import sys
import os.path
import math
from pyomo.environ import *
from pyomo.dae import *
import C
def model_plot(m):
    """Wireframe plot of the temperature field T(t, r) of a solved Pyomo model.

    Expects a model with ContinuousSets m.t and m.r and a Var m.T indexed (t, r).
    """
    r = sorted(m.r)
    t = sorted(m.t)
    # Grids shaped (len(t), len(r)): rows sweep time, columns sweep radius.
    rgrid, tgrid = np.meshgrid(r, t)
    Tgrid = np.array([[m.T[ti, rj].value for rj in r] for ti in t])
    fig = plt.figure(figsize=(10, 6))
    ax = fig.add_subplot(1, 1, 1, projection='3d')
    ax.set_xlabel('Distance r')
    ax.set_ylabel('Time t')
    ax.set_zlabel('Temperature T')
    p = ax.plot_wireframe(rgrid, tgrid, Tgrid)
# 1-D transient conduction in a cylinder (Pyomo DAE warm-up example):
# dT/dt = d2T/dr2 + (1/r) dT/dr,  T(0,r)=0, T(t,1)=1, dT/dr(t,0)=0.
m = ConcreteModel()
m.r = ContinuousSet(bounds=(0,1))
m.t = ContinuousSet(bounds=(0,2))
m.T = Var(m.t, m.r)
m.dTdt = DerivativeVar(m.T, wrt=m.t)
m.dTdr = DerivativeVar(m.T, wrt=m.r)
m.d2Tdr2 = DerivativeVar(m.T, wrt=(m.r, m.r))
# PDE enforced only on the open interior (skip boundaries and t = 0).
m.pde = Constraint(m.t, m.r, rule=lambda m, t, r: m.dTdt[t,r] == m.d2Tdr2[t,r] + (1/r)*m.dTdr[t,r]
                   if r > 0 and r < 1 and t > 0 else Constraint.Skip)
m.ic = Constraint(m.r, rule=lambda m, r: m.T[0,r] == 0)
m.bc1 = Constraint(m.t, rule=lambda m, t: m.T[t,1] == 1 if t > 0 else Constraint.Skip)
m.bc2 = Constraint(m.t, rule=lambda m, t: m.dTdr[t,0] == 0)
TransformationFactory('dae.finite_difference').apply_to(m, nfe=20, wrt=m.r, scheme='CENTRAL')
TransformationFactory('dae.finite_difference').apply_to(m, nfe=50, wrt=m.t, scheme='BACKWARD')
SolverFactory('ipopt').solve(m).write()
model_plot(m)
##2
# Same problem rebuilt with a decorated constraint and a dummy objective
# (ipopt is happier with an explicit Objective).
m = ConcreteModel()
m.r = ContinuousSet(bounds=(0,1))
m.t = ContinuousSet(bounds=(0,2))
m.T = Var(m.t, m.r)
m.dTdt = DerivativeVar(m.T, wrt=m.t)
m.dTdr = DerivativeVar(m.T, wrt=m.r)
m.d2Tdr2 = DerivativeVar(m.T, wrt=(m.r, m.r))
@m.Constraint(m.t, m.r)
def pde(m, t, r):
    # Interior heat equation only; edges handled by ic/bc1/bc2 below.
    if t == 0:
        return Constraint.Skip
    if r == 0 or r == 1:
        return Constraint.Skip
    return m.dTdt[t,r] == m.d2Tdr2[t,r]
m.obj = Objective(expr=1)
m.ic = Constraint(m.r, rule=lambda m, r: m.T[0,r] == 0 if r > 0 and r < 1 else Constraint.Skip)
m.bc1 = Constraint(m.t, rule=lambda m, t: m.T[t,1] == 1)
m.bc2 = Constraint(m.t, rule=lambda m, t: m.dTdr[t,0] == 0)
TransformationFactory('dae.finite_difference').apply_to(m, nfe=50, scheme='FORWARD', wrt=m.r)
TransformationFactory('dae.finite_difference').apply_to(m, nfe=50, scheme='FORWARD', wrt=m.t)
SolverFactory('ipopt').solve(m, tee=True).write()
model_plot(m)
###curve fitting
### solve all equations together using given parameters
###solve heat equation
# parameters value for 3 liter bioreactor
# a-d: empirical constants of the humidity/saturation relation used in dHdT;
# the remaining symbols mirror the project constants module C.
a,b,c,d=18.30, 3816.44, 227.02, 133.322
Cpa,Cps=1180,2500            # heat capacities of air / solids -- TODO confirm units
IDS=.34
Ka,Ks=74.16, 1440
p,R=101325, .03              # pressure [Pa]; bed radius -- presumably metres, verify
T0,Tj,Ta,Vz,W0=27, 30, 30, 60, 2.33*1000
X0,Y0,YQ,Z=2.1, 700, 8.4*1000, .6
epi,lmbda,rhoA,rhoS=.38, 2414.3, 1.14, 750
Um, Xm =.33,184              # logistic growth rate / biomass carrying capacity
Vz=60                        # re-assignment; same value as above
h=200
###solve 2D heat equation
# Axisymmetric bed temperature T(t, z, r) over a 200-unit time horizon.
m = ConcreteModel()
m.z = ContinuousSet(bounds=(0,C.Z))
m.r = ContinuousSet(bounds=(0,C.R))
m.t = ContinuousSet(bounds=(0,200))
m.T = Var(m.t, m.z, m.r)
m.dTdt = DerivativeVar(m.T, wrt=m.t)
m.dTdz = DerivativeVar(m.T, wrt=m.z)
m.d2Tdz2 = DerivativeVar(m.T, wrt=(m.z, m.z))
m.dTdr = DerivativeVar(m.T, wrt=m.r)
m.d2Tdr2 = DerivativeVar(m.T, wrt=(m.r, m.r))
#issues
#t,T,z=0,T0,0
# Helper expressions for the 2-D heat-balance PDE.
# Bug fixes vs the original:
#  * the helpers closed over *global* names t, z, r (the constraint's lambda
#    locals are not visible to them) -- the rule indices are now passed in
#    explicitly;
#  * X() applied np.exp to the ContinuousSet m.t instead of the time index,
#    and np.exp was applied to Pyomo expressions (use pyomo's exp);
#  * heLHS5 divided by the ContinuousSet m.r instead of the scalar index r.
def X(t):
    """Biomass at time t (logistic growth law)."""
    return C.Xm / (1 + ((C.Xm / C.X0) - 1) * exp(-C.Um * t))

def dXdt(t):
    """Biomass growth rate at time t."""
    return C.Um * X(t) * (1 - X(t) / C.Xm)

def dHdT(m, t, z, r):
    """d(humidity)/dT at the local bed temperature m.T[t, z, r]."""
    T = m.T[t, z, r]
    sat = C.d * exp(C.a - C.b / (T + C.c))  # saturation-pressure term
    return (.62413 * C.b * C.p) / (((T + C.c) ** 2) * ((C.p / sat - 1) ** 2) * sat)

def Cpb(m, t, z, r):
    """Effective bed heat capacity."""
    return (C.epi * C.rhoA * (C.Cpa + C.lmbda * dHdT(m, t, z, r)
                              + (1 - C.epi) * C.rhoS * C.Cps)) / C.rhoB

def heRHS(m, t, z, r):
    """Heat accumulation term."""
    return C.rhoB * Cpb(m, t, z, r) * m.dTdt[t, z, r]

def heLHS1(m, t, z, r):
    """Metabolic heat generation."""
    return C.rhoS * (1 - C.epi) * C.YQ * dXdt(t)

def heLHS2(m, t, z, r):
    """Convective (sensible) heat transport along z."""
    return C.rhoA * C.Cpa * C.Vz * m.dTdz[t, z, r]

def heLHS3(m, t, z, r):
    """Evaporative heat transport along z."""
    return C.rhoA * C.lmbda * C.Vz * dHdT(m, t, z, r) * m.dTdz[t, z, r]

def heLHS4(m, t, z, r):
    """Axial conduction."""
    return C.Kb * m.d2Tdz2[t, z, r]

def heLHS5(m, t, z, r):
    """Radial conduction in cylindrical coordinates (r > 0 guaranteed by the guard)."""
    return C.Kb * m.dTdr[t, z, r] / r + C.Kb * m.d2Tdr2[t, z, r]

# Heat balance on the open interior; boundaries handled by ic/bc1..bc4 below.
m.pde = Constraint(
    m.t, m.z, m.r,
    rule=lambda m, t, z, r:
        heRHS(m, t, z, r) == heLHS1(m, t, z, r) - heLHS2(m, t, z, r)
        - heLHS3(m, t, z, r) + heLHS4(m, t, z, r) + heLHS5(m, t, z, r)
        if 0 < r < C.R and 0 < z < C.Z and t > 0 else Constraint.Skip)
# =============================================================================
# m.ic = Constraint(m.r, rule=lambda m, r: m.T[0,r] == 0)
# m.bc1 = Constraint(m.t, rule=lambda m, t: m.T[t,1] == 1 if t > 0 else Constraint.Skip)
# m.bc2 = Constraint(m.t, rule=lambda m, t: m.dTdr[t,0] == 0)
#
# m.ic = Constraint(m.z, rule=lambda m, z: m.T[0,z] == 27)
# m.ic = Constraint(m.r, rule=lambda m, r: m.T[0,r] == 27)
# m.bc1 = Constraint(m.t, rule=lambda m, t: m.dTdz[t,Z] == 0 if t > 0 else Constraint.Skip)
# m.bc2 = Constraint(m.t, rule=lambda m, t: m.T[t,0] == Ta)
# m.bc3 = Constraint(m.t, rule=lambda m, t: m.dTdz[t,0] == 0 if t > 0 else Constraint.Skip)
# m.bc4 = Constraint(m.t, rule=lambda m, t: -Kb*m.dTdr[t,R] == h*(T-Tj))
# =============================================================================
# Initial and boundary conditions actually in force:
m.ic = Constraint(m.z,m.r, rule=lambda m, z, r: m.T[0,z,r] == 27)  # uniform 27 degC start
m.bc1 = Constraint(m.t,m.r, rule=lambda m, t,r: m.dTdz[t,C.Z,r] == 0 if t > 0 else Constraint.Skip)  # zero-gradient outlet
m.bc2 = Constraint(m.t,m.r, rule=lambda m, t,r: m.T[t,0,r] == C.Ta)  # inlet air temperature
# NOTE(review): bc3 applies dTdz at r=0; an axis-symmetry condition would use dTdr -- confirm.
m.bc3 = Constraint(m.t,m.z, rule=lambda m, t,z: m.dTdz[t,z,0] == 0 if t > 0 else Constraint.Skip)
m.bc4 = Constraint(m.t,m.z, rule=lambda m, t,z: C.Kb*m.dTdr[t,z,C.R]+C.h*(m.T[t,z,C.R]-C.Tj)==0)  # convective wall loss
# Very coarse central finite differences on all three axes (nfe=3).
TransformationFactory('dae.finite_difference').apply_to(m, nfe=3, wrt=m.t, scheme='CENTRAL')
TransformationFactory('dae.finite_difference').apply_to(m, nfe=3, wrt=m.z, scheme='CENTRAL')
TransformationFactory('dae.finite_difference').apply_to(m, nfe=3, wrt=m.r, scheme='CENTRAL')
SolverFactory('ipopt').solve(m).write()
# NOTE(review): re-discretizing an already-discretized model is suspect, and
# model_plot expects a 2-index T(t, r) rather than this 3-index field.
discretizer = TransformationFactory('dae.collocation')
discretizer.apply_to(m, nfe =20, ncp=4)
model_plot(m)
m.r                 # stray REPL inspection -- no effect
m.T[1,0,0].value    # stray REPL inspection -- no effect
#######
###curve fitting
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from gekko import GEKKO
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
# Workbook sheet "x_vs_t": measured biomass X versus time t.
path=r"/Users/shaktimukker/Documents/MyProjects/projects/bacterial-growth-analysis/"
data_x_vs_t=pd.read_excel(path+"data.xlsx",sheet_name="x_vs_t")
# eq8
def fn_eq8(X, a1, a2, b0, b1, b2):
    """Growth-rate surface, eq. 8 (duplicate of the earlier definition).

    X[:, 0] is moisture (Y); X[:, 1] is temperature (T); Tmin/Tmax fixed at 18/45.
    """
    Y, T = X[:, 0], X[:, 1]
    Tmin, Tmax = 18, 45
    thermal = a1 * (T - Tmin) * (1 - np.exp(a2 * (T - Tmax)))
    return thermal ** 2 * (b0 + b1 * Y + b2 * Y ** 2)
# eq9
def fn_eq9(X, a1, a2, b0, b1, b2):
    """Growth-rate surface, eq. 9 (duplicate of the earlier definition).

    Only the (1 - exp(...)) factor is squared, unlike eq. 8.
    """
    Y, T = X[:, 0], X[:, 1]
    Tmin, Tmax = 18, 45
    damped = (1 - np.exp(a2 * (T - Tmax))) ** 2
    return a1 * (T - Tmin) * damped * (b0 + b1 * Y + b2 * Y ** 2)
# X vs t
def x_vs_t(X, Xm, Um):
    """Logistic biomass curve X(t) with carrying capacity Xm and rate Um.

    Bug fix: use np.exp instead of math.exp so the function also accepts the
    array-valued time vectors it is fitted against (math.exp is scalar-only);
    this matches the later redefinition of the same function in this file.
    """
    t = X
    return Xm / (1 + ((Xm / C.X0) - 1) * np.exp(-Um * t))
# CO2 vs t
def co2_vs_t(X,Yxco2,Mco2):
    # Cumulative CO2 produced up to time t (integrated logistic kinetics).
    # NOTE(review): Xm, Um = 1, 2 look like placeholders -- the later
    # redefinition uses 184 and .33; math.exp also restricts t to scalars.
    Xm,Um=1,2
    t=X
    comn=1+((Xm/C.X0)-1)*math.exp(-Um*t)
    RHS1=1/(Yxco2*comn)
    RHS2=1/(Yxco2*(Xm/C.X0))
    RHS3=(Mco2/Um)*math.log(comn/((Xm/C.X0)*math.exp(-Um*t)))
    return C.CCP0+Xm*(RHS1-RHS2+RHS3)
def dco2dt_vs_t(X, Yxco2, Mco2):
    """CO2 evolution *rate* dCO2/dt.

    Bug fix: the original body was a verbatim copy of co2_vs_t and returned
    the cumulative CO2 curve, not its derivative.  The rate form below
    matches the corrected dco2dt_vs_t defined later in this file.
    """
    Xm, Um = 1, 2  # placeholder kinetic constants, as in co2_vs_t above
    t = X
    comn = 1 + ((Xm / C.X0) - 1) * math.exp(-Um * t)
    return ((Um / Yxco2) * (1 - 1 / comn) + Mco2) * Xm / comn
# Duplicate of the earlier fitting section; second guess list overrides the first.
guesses = [2.981, 0.201, 0.431, 2.010, 0.528]
guesses = [2.189, 0.284, 0.418, 1.220, 0.867]
X = np.array(data[["Y", "T"]].values.tolist())
f = data["Um"].values.tolist()
popt, pcov = curve_fit(fn_eq9, X, f, guesses)
sigma = np.sqrt(np.diag(pcov))  # one-standard-deviation parameter errors
# plot
# Bug fix: np.column_stack takes a single tuple of arrays; the original
# passed two positional arguments, which raises TypeError.
X = np.column_stack((X[:, 0], X[:, 1]))  # independent variables
# NOTE(review): plotted with fn_eq8 although popt was fitted with fn_eq9 -- confirm.
f = fn_eq8(X, *popt)
fig = plt.figure()
ax = fig.gca(projection='3d')  # NOTE(review): removed in matplotlib>=3.6
ax.plot(X[:, 0], X[:, 1], f)
ax.set_xlabel('Y')
ax.set_ylabel('T')
ax.set_zlabel('f(Y,T)')
plt.savefig('images/graphical-mulvar-1.png')
## APM heart-rate fitting example (apmonitor.com)
# Bug fix: '%matplotlib inline' is an IPython magic and a SyntaxError in a
# plain .py module -- kept here as a comment for notebook use.
# %matplotlib inline
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from scipy.optimize import curve_fit
from sklearn.metrics import r2_score

x = pd.read_csv("https://apmonitor.com/che263/uploads/Main/heart_rate.txt")
x.describe()

def bpm(t, c0, c1, c2, c3):
    """Heart-rate model: linear trend plus an exponential warm-up transient."""
    return c0 + c1 * t - c2 * np.exp(-c3 * t)

g = [100, .01, 100, .01]
t = x["Time (sec)"].values
hr = x["Heart Rate (BPM)"].values
g, cov = curve_fit(bpm, t, hr, g)
y = [bpm(t, g[0], g[1], g[2], g[3]) for t in x["Time (sec)"]]
plt.plot(x["Time (sec)"], x["Heart Rate (BPM)"])
plt.plot(x["Time (sec)"], y, 'r.')
# Bug fix: r2_score expects (y_true, y_pred) -- observed series first.
print('R^2: ', r2_score(hr, y))
# x vs t
# %matplotlib inline  -- IPython magic; invalid syntax in a plain .py module
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from scipy.optimize import curve_fit
from sklearn.metrics import r2_score

path = r"/Users/shaktimukker/Documents/MyProjects/projects/bacterial-growth-analysis/"
data_x_vs_t = pd.read_excel(path + "data.xlsx", sheet_name="x_vs_t")
data_x_vs_t.describe()

# X vs t
def x_vs_t(X, Xm, Um):
    """Logistic biomass curve X(t) with carrying capacity Xm and rate Um."""
    t = X
    return Xm / (1 + ((Xm / C.X0) - 1) * np.exp(-Um * t))

# dxdt vs time  (bug fix: this header line was missing its '#', a SyntaxError)
def dXdt_vs_time(X, Xm, Um):
    """Growth rate dX/dt derived from the logistic curve."""
    t = X
    X1 = x_vs_t(X, Xm, Um)
    return Um * X1 * (1 - X1 / Xm)

g = [184, .33]
t = data_x_vs_t["t"].values
X = data_x_vs_t["x"].values
g, cov = curve_fit(x_vs_t, t, X, g)
# Bug fix: the comprehension iterated over the biomass values X (and clobbered
# t with a scalar, breaking the plots below); evaluate at the time points.
y = [x_vs_t(tv, g[0], g[1]) for tv in t]
plt.plot(t, X)
plt.plot(t, y, 'r')
print('R^2: ', r2_score(X, y))
####### co2 vs t
# %matplotlib inline  -- IPython magic; invalid syntax in a plain .py module
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from scipy.optimize import curve_fit
from sklearn.metrics import r2_score
from sklearn.preprocessing import MinMaxScaler

path = r"/Users/shaktimukker/Documents/MyProjects/projects/bacterial-growth-analysis/"
data_co2_vs_t = pd.read_excel(path + "data.xlsx", sheet_name="co2_vs_t")
data_co2_vs_t.describe()

# CO2 vs t
def co2_vs_t(X, Yxco2, Mco2):
    """Cumulative CO2 produced up to time t (integrated logistic kinetics)."""
    Xm, Um = 184, .33
    t = X
    comn = 1 + ((Xm / C.X0) - 1) * np.exp(-Um * t)
    RHS1 = 1 / (Yxco2 * comn)
    RHS2 = 1 / (Yxco2 * (Xm / C.X0))
    RHS3 = (Mco2 / Um) * np.log(comn / ((Xm / C.X0) * np.exp(-Um * t)))
    return C.CCP0 + Xm * (RHS1 - RHS2 + RHS3)

def dco2dt_vs_t(X, Yxco2, Mco2, Xm, Um):
    """CO2 evolution rate dCO2/dt."""
    t = X
    comn = 1 + ((Xm / C.X0) - 1) * np.exp(-Um * t)
    RHS1 = Um / Yxco2
    RHS2 = RHS1 * (1 - 1 / comn)
    RHS3 = RHS2 + Mco2
    return RHS3 * Xm / comn

g = [2.60, .005]
t = data_co2_vs_t["t"].values
X = data_co2_vs_t["co2"].values
X = 500 * X  # unit rescaling of the measured CO2 signal -- TODO confirm factor
g, cov = curve_fit(co2_vs_t, t, X, g)
# Bug fix: evaluate the fitted curve at the time points, not at the data
# values (the original also clobbered t with a scalar).
y = [co2_vs_t(tv, g[0], g[1]) for tv in t]
plt.plot(t, X)
plt.plot(t, y, 'r')
# Bug fix: r2_score expects (y_true, y_pred) -- observed first, fitted second.
print('R^2: ', r2_score(X, y))
#### knapsk problem
# Classic 0/1 knapsack as a binary integer program (Pyomo + GLPK):
# maximize total benefit b subject to total weight w <= W_max.
from pyomo.environ import *
A = ['hammer', 'wrench', 'screwdriver', 'towel']
b = {'hammer':8, 'wrench':3, 'screwdriver':6, 'towel':11}
w = {'hammer':5, 'wrench':7, 'screwdriver':4, 'towel':3}
W_max = 14
model = ConcreteModel()
# x[i] = 1 iff item i is packed.
model.x = Var( A, within=Binary )
model.value = Objective(expr = sum( b[i]*model.x[i] for i in A),
                        sense = maximize )
model.weight = Constraint(expr = sum( w[i]*model.x[i] for i in A) <= W_max )
opt = SolverFactory('glpk')
result_obj = opt.solve(model, tee=True)
model.pprint()
# dxdt vs time
def dXdt_vs_time():
    # NOTE(review): depends on module-level names C, X and m at call time;
    # X is later rebound to data arrays, so this zero-argument stub is
    # almost certainly stale -- confirm before use.
    return C.Um*X(m)*(1-X(m)/C.Xm)
# dco2dt vs time
## example: 2-D transient PDE on the unit square (Pyomo DAE)
from pyomo.environ import *
from pyomo.dae import *

pi = 3.1416

m = ConcreteModel()
#m.pi = Param(initialize=pi)
m.t = ContinuousSet(bounds=(0, 2))
m.x = ContinuousSet(bounds=(0, 1))
m.y = ContinuousSet(bounds=(0, 1))
m.u = Var(m.x, m.y, m.t)
m.dudx = DerivativeVar(m.u, wrt=m.x)
m.dudx2 = DerivativeVar(m.u, wrt=(m.x, m.x))
m.dudy = DerivativeVar(m.u, wrt=m.y)
m.dudy2 = DerivativeVar(m.u, wrt=(m.y, m.y))
m.dudt = DerivativeVar(m.u, wrt=m.t)

def _pde(m, i, j, k):
    """Interior PDE: pi^2 * du/dt == d2u/dx2 (skipped on boundaries and t=0)."""
    if i == 0 or i == 1 or j == 0 or j == 1 or k == 0:
        return Constraint.Skip
    return pi ** 2 * m.dudt[i, j, k] == m.dudx2[i, j, k]
m.pde = Constraint(m.x, m.y, m.t, rule=_pde)

def _initcon(m, i, j):
    """Initial profile u(x, y, 0) = sin(pi x) on the interior."""
    if i == 0 or i == 1 or j == 0 or j == 1:
        return Constraint.Skip
    return m.u[i, j, 0] == sin(pi * i)
m.initcon = Constraint(m.x, m.y, rule=_initcon)

# Bug fixes below: each boundary Constraint referenced a nonexistent rule
# name (_lowerbound/_upperbound -> NameError) and was declared over index
# sets that did not match the rule's index order.  Rules take
# (m, space-index, time-index), so the sets are (space, m.t).
def _lowerboundx(m, j, k):
    return m.u[0, j, k] == 0
m.lowerboundx = Constraint(m.y, m.t, rule=_lowerboundx)

def _upperboundx(m, j, k):
    return pi * exp(-j) + m.dudx[1, j, k] == 0
m.upperboundx = Constraint(m.y, m.t, rule=_upperboundx)

def _lowerboundy(m, i, k):
    return m.u[i, 0, k] == 0
m.lowerboundy = Constraint(m.x, m.t, rule=_lowerboundy)

def _upperboundy(m, i, k):
    # NOTE(review): the original referenced an undefined 'i' and m.dudx here;
    # assuming the y-boundary mirrors the x one with dudy -- confirm.
    return pi * exp(-i) + m.dudy[i, 1, k] == 0
m.upperboundy = Constraint(m.x, m.t, rule=_upperboundy)

m.obj = Objective(expr=1)
# Discretize using Orthogonal Collocation
# discretizer = TransformationFactory('dae.collocation')
# discretizer.apply_to(m,nfe=10,ncp=3,wrt=m.x)
# discretizer.apply_to(m,nfe=20,ncp=3,wrt=m.t)
# Discretize using Finite Difference and Collocation
# =============================================================================
# discretizer = TransformationFactory('dae.finite_difference')
# discretizer2 = TransformationFactory('dae.collocation')
# discretizer.apply_to(m,nfe=25,wrt=m.x,scheme='BACKWARD')
# discretizer2.apply_to(m,nfe=20,ncp=3,wrt=m.t)
# =============================================================================
# Discretize using Finite Difference Method
discretizer = TransformationFactory('dae.finite_difference')
discretizer.apply_to(m,nfe=25,wrt=m.x,scheme='BACKWARD')
discretizer.apply_to(m,nfe=25,wrt=m.y,scheme='BACKWARD')
discretizer.apply_to(m,nfe=20,wrt=m.t,scheme='BACKWARD')
solver=SolverFactory('ipopt')
results = solver.solve(m,tee=True)
# Collect the solution into parallel x/t/u lists for the wireframe plot.
x = []
t = []
u = []
for i in sorted(m.x):
    temp=[]
    tempx = []
    for j in sorted(m.t):
        tempx.append(i)
        # NOTE(review): m.u is indexed (x, y, t); m.u[i, j] omits the y index
        # and will raise KeyError -- this loop apparently predates the y axis.
        temp.append(value(m.u[i,j]))
    x.append(tempx)
    t.append(sorted(m.t))
    u.append(temp)
import numpy
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d.axes3d import Axes3D
fig = plt.figure()
ax = fig.add_subplot(1,1,1,projection='3d')
ax.set_xlabel('Distance x')
ax.set_ylabel('Time t')
p = ax.plot_wireframe(np.array(x),np.array(t),np.array(u),rstride=1,cstride=1)
fig.show()
############ m2
# 2-D heat equation with a source term u(x, y, t) on a 10 x 5 rectangle,
# solved by backward finite differences and ipopt, then flattened to outDF.
from pyomo.environ import *
from pyomo.dae import *
m = ConcreteModel()
m.time = ContinuousSet(bounds=(0,1))
m.x = ContinuousSet(bounds=(0,10))
m.y = ContinuousSet(bounds=(0,5))
m.T = Var(m.x,m.y,m.time)
m.u = Var(m.x,m.y,m.time)
m.T0 = Param(initialize=5)          # initial temperature
m.TD = Param(m.x,m.y,initialize=25) # target field (used only in commented objective)
m.Ux0 = Param(initialize=10)        # flux at x = 0
m.Uy5 = Param(initialize=15)        # flux at y = 5
m.dTdx = DerivativeVar(m.T,wrt=m.x)
m.d2Tdx2 = DerivativeVar(m.T,wrt=(m.x,m.x))
m.dTdy = DerivativeVar(m.T,wrt=m.y)
m.d2Tdy2 = DerivativeVar(m.T,wrt=(m.y,m.y))
m.dTdt = DerivativeVar(m.T,wrt=m.time)
def _heateq(m,i,j,k):
    # Heat balance with distributed source u.
    return m.d2Tdx2[i,j,k] + m.d2Tdy2[i,j,k] + m.u[i,j,k] == m.dTdt[i,j,k]
m.heateq = Constraint(m.x,m.y,m.time,rule=_heateq)
def _initT(m,i,j):
    return m.T[i,j,0] == m.T0
m.initT = Constraint(m.x,m.y,rule=_initT)
def _xbound(m,j,k):
    return m.dTdx[0,j,k] == m.Ux0
m.xbound = Constraint(m.y,m.time,rule=_xbound)
def _ybound(m,i,k):
    return m.dTdy[i,5,k] == m.Uy5
m.ybound = Constraint(m.x,m.time,rule=_ybound)
# def _intExp(m,i,j):
#     return m.T[i,j,1] - m.TD[i,j]
# m.intExp = Expression(m.x,m.y,rule=_intExp)
# def _obj(m):
#     return Integral(Integral(expr=m.intExp,wrt=m.x,bounds=(0,10)),
#                     wrt=m.y,bounds=(0,5))
# m.obj = Objective(rule=_obj)
m.obj = Objective(expr=1)  # dummy objective: feasibility problem
# Discretize using Orthogonal Collocation
# discretizer = TransformationFactory('dae.collocation')
# discretizer.apply_to(m,nfe=10,ncp=3,wrt=m.x)
# discretizer.apply_to(m,nfe=20,ncp=3,wrt=m.t)
# Discretize using Finite Difference and Collocation
# =============================================================================
# discretizer = TransformationFactory('dae.finite_difference')
# discretizer2 = TransformationFactory('dae.collocation')
# discretizer.apply_to(m,nfe=25,wrt=m.x,scheme='BACKWARD')
# discretizer2.apply_to(m,nfe=20,ncp=3,wrt=m.t)
# =============================================================================
# Discretize using Finite Difference Method
discretizer = TransformationFactory('dae.finite_difference')
discretizer.apply_to(m,nfe=25,wrt=m.x,scheme='BACKWARD')
discretizer.apply_to(m,nfe=25,wrt=m.y,scheme='BACKWARD')
discretizer.apply_to(m,nfe=20,wrt=m.time,scheme='BACKWARD')
solver=SolverFactory('ipopt')
results = solver.solve(m,tee=True)
# Flatten the source field u into a tidy DataFrame sorted by time.
out=[]
for i in sorted(m.x):
    for j in sorted(m.y):
        for k in sorted(m.time):
            out.append([i,j,k,value(m.u[i,j,k])])
outDF=pd.DataFrame(out,columns=["x","y","t","u"])
outDF=outDF.sort_values(by="t")
### m3
# NOTE(review): this whole section is an exact duplicate of m2 above.
from pyomo.environ import *
from pyomo.dae import *
m = ConcreteModel()
m.time = ContinuousSet(bounds=(0,1))
m.x = ContinuousSet(bounds=(0,10))
m.y = ContinuousSet(bounds=(0,5))
m.T = Var(m.x,m.y,m.time)
m.u = Var(m.x,m.y,m.time)
m.T0 = Param(initialize=5)
m.TD = Param(m.x,m.y,initialize=25)
m.Ux0 = Param(initialize=10)
m.Uy5 = Param(initialize=15)
m.dTdx = DerivativeVar(m.T,wrt=m.x)
m.d2Tdx2 = DerivativeVar(m.T,wrt=(m.x,m.x))
m.dTdy = DerivativeVar(m.T,wrt=m.y)
m.d2Tdy2 = DerivativeVar(m.T,wrt=(m.y,m.y))
m.dTdt = DerivativeVar(m.T,wrt=m.time)
def _heateq(m,i,j,k):
    return m.d2Tdx2[i,j,k] + m.d2Tdy2[i,j,k] + m.u[i,j,k] == m.dTdt[i,j,k]
m.heateq = Constraint(m.x,m.y,m.time,rule=_heateq)
def _initT(m,i,j):
    return m.T[i,j,0] == m.T0
m.initT = Constraint(m.x,m.y,rule=_initT)
def _xbound(m,j,k):
    return m.dTdx[0,j,k] == m.Ux0
m.xbound = Constraint(m.y,m.time,rule=_xbound)
def _ybound(m,i,k):
    return m.dTdy[i,5,k] == m.Uy5
m.ybound = Constraint(m.x,m.time,rule=_ybound)
# def _intExp(m,i,j):
#     return m.T[i,j,1] - m.TD[i,j]
# m.intExp = Expression(m.x,m.y,rule=_intExp)
# def _obj(m):
#     return Integral(Integral(expr=m.intExp,wrt=m.x,bounds=(0,10)),
#                     wrt=m.y,bounds=(0,5))
# m.obj = Objective(rule=_obj)
m.obj = Objective(expr=1)
# Discretize using Orthogonal Collocation
# discretizer = TransformationFactory('dae.collocation')
# discretizer.apply_to(m,nfe=10,ncp=3,wrt=m.x)
# discretizer.apply_to(m,nfe=20,ncp=3,wrt=m.t)
# Discretize using Finite Difference and Collocation
# =============================================================================
# discretizer = TransformationFactory('dae.finite_difference')
# discretizer2 = TransformationFactory('dae.collocation')
# discretizer.apply_to(m,nfe=25,wrt=m.x,scheme='BACKWARD')
# discretizer2.apply_to(m,nfe=20,ncp=3,wrt=m.t)
# =============================================================================
# Discretize using Finite Difference Method
discretizer = TransformationFactory('dae.finite_difference')
discretizer.apply_to(m,nfe=25,wrt=m.x,scheme='BACKWARD')
discretizer.apply_to(m,nfe=25,wrt=m.y,scheme='BACKWARD')
discretizer.apply_to(m,nfe=20,wrt=m.time,scheme='BACKWARD')
solver=SolverFactory('ipopt')
results = solver.solve(m,tee=True)
out=[]
for i in sorted(m.x):
    for j in sorted(m.y):
        for k in sorted(m.time):
            out.append([i,j,k,value(m.u[i,j,k])])
outDF=pd.DataFrame(out,columns=["x","y","t","u"])
outDF=outDF.sort_values(by="t")
###### bed water
####### co2 vs t
data_co2_vs_t = pd.read_excel(path + "data.xlsx", sheet_name="co2_vs_t")
print(data_co2_vs_t.describe())

# CO2 vs t
# X vs t
def x_vs_t(t, Xm, Um):
    """Logistic biomass X(t); prints a message and returns None on failure."""
    try:
        return Xm / (1 + ((Xm / C.X0) - 1) * np.exp(-Um * t))
    except Exception as e:
        # Typo fix in all messages of this section: "Falied" -> "Failed".
        print("'x_vs_t' " + "method execution Failed")
        print("Exception is ", str(e))

# dxdt vs time
def dXdt_vs_time(t, Xm, Um):
    """Growth rate dX/dt derived from the logistic curve."""
    try:
        X1 = x_vs_t(t, Xm, Um)
        return Um * X1 * (1 - X1 / Xm)
    except Exception as e:
        print("'dXdt_vs_time' " + "method execution Failed")
        print("Exception is ", str(e))

# dPdt vs time Generalise
def dPdt_vs_t(t, Yxp, Mp, Xm=183, Um=.33):
    """Generic product-formation rate: growth-associated plus maintenance term."""
    try:
        #Xm,Um=184,.33
        return Yxp * dXdt_vs_time(t, Xm, Um) + Mp * x_vs_t(t, Xm, Um)
    except Exception as e:
        print("'dPdt_vs_t' " + "method execution Failed")
        print("Exception is ", str(e))

# dHdt vs time
def dHdt_vs_t(T):
    """Humidity derivative dH/dT at temperature T (psychrometric relation)."""
    try:
        RHS1 = .62413 * C.b * C.p
        RHS2 = (T + C.c) ** 2
        RHS3 = C.d * np.exp(C.a - C.b / (T + C.c))
        RHS4 = (C.p / RHS3 - 1) ** 2
        return RHS1 / (RHS2 * RHS4 * RHS3)
    except Exception as e:
        print("'dHdt_vs_t' " + "method execution Failed")
        print("Exception is ", str(e))

# dTdz vs time
def dTdz_vs_t(outDF):
    """Axial temperature gradient over the bed from the solved field outDF[t, z, r, u]."""
    try:
        tempZ = outDF[(outDF["z"] == outDF["z"].max()) & (outDF["r"] == outDF["r"].max())]['u']
        tempZ0 = outDF[(outDF["z"] == outDF["z"].min()) & (outDF["r"] == outDF["r"].max())]['u']
        return (np.array(tempZ) - np.array(tempZ0)) / outDF["z"].max()
    except Exception as e:
        print("'dTdz_vs_t' " + "method execution Failed")
        print("Exception is ", str(e))

# Revap vs time # outDF ['t', 'z', 'r', 'u']
def Revap_vs_t(outDF):
    """Evaporation rate from the mean bed temperature at each time step."""
    try:
        dhdt = [dHdt_vs_t(temp) for temp in outDF.groupby("t")["u"].mean().values.tolist()]
        return C.rhoA * C.Vz * C.V * dTdz_vs_t(outDF) * np.array(dhdt)
    except Exception as e:
        print("'Revap_vs_t' " + "method execution Failed")
        print("Exception is ", str(e))

# dBWdt vs time
def dBWdt_vs_t(outDF):
    """Bed-water balance: metabolic water production minus evaporation / IDS."""
    try:
        # NOTE(review): precedence gives production - (evaporation / IDS);
        # confirm (production - evaporation) / IDS was not intended.
        return np.array(dPdt_vs_t(outDF['t'].unique(), C.Yxw, C.Mw, C.Xm, C.Um)) - np.array(Revap_vs_t(outDF)) / C.IDS
    except Exception as e:
        print("'dBWdt_vs_t' " + "method execution Failed")
        print("Exception is ", str(e))
# dco2dt vs time
# Evaluate and plot the product-formation rate at the measured time points.
t=data_co2_vs_t["t"].values
Yxp,Mp=3.3,.01
Xm,Um=183,.33
dPdt_vs_t(t,Yxp,Mp,Xm,Um)  # stray REPL call -- result discarded
# dwdt vs time
t=data_co2_vs_t["t"].values
Yxp,Mp=3.3,.01
Xm,Um=183,.33
y=dPdt_vs_t(t,Yxp,Mp,Xm,Um)
# dBWdt vs time
# NOTE(review): dBWdt_vs_t expects outDF columns ['t','z','r','u'], but the
# rename of 'x' -> 'z' only happens further below -- verify execution order.
y=dBWdt_vs_t(outDF)
plt.plot(t,dPdt_vs_t(t,Yxp,Mp,Xm,Um),label='Estimated dW/dt')
plt.xlabel("Time (Hours)")
plt.ylabel("dW/dt")
plt.title("dW/dt vs time")
plt.legend()
plt.show()
plt.plot(outDF['t'].unique(),dBWdt_vs_t(outDF),label='Estimated dBW/dt')
plt.xlabel("Time (Hours)")
plt.ylabel("dBW/dt")
plt.title("dBW/dt' vs time")
plt.legend()
plt.show()
# Reshape the solver output to the ['t','z','r','u'] layout the helpers expect.
outDF.columns
outDF.rename(columns={"x":"z"},inplace=True)
outDF=outDF[['t','z', 'r', 'u']]
##
# Stray REPL sanity checks (no effect on state):
outDF['t'].nunique()
outDF['x'].nunique()
outDF['y'].nunique()
26*26*21
len(outDF)
################## m4
###### bed water
### Heat Equation
m = ConcreteModel()
m.t = ContinuousSet(bounds=(0,200))
m.z = ContinuousSet(bounds=(0,C.Z))
m.r = ContinuousSet(bounds=(0,C.R))
m.T = Var(m.z,m.r,m.t)
m.u = Var(m.z,m.r,m.t)
m.T0 = Param(initialize=5)
m.TD = Param(m.z,m.r,initialize=25)
m.Ux0 = Param(initialize=10)
m.Uy5 = Param(initialize=15)
m.dTdz = DerivativeVar(m.T,wrt=m.z)
m.d2Tdz2 = DerivativeVar(m.T,wrt=(m.z,m.z))
m.dTdr = DerivativeVar(m.T,wrt=m.r)
m.d2Tdr2 = DerivativeVar(m.T,wrt=(m.r,m.r))
m.dTdt = DerivativeVar(m.T,wrt=m.t)
def X(m):
    """Expression string for the logistic biomass curve X(t)."""
    return "C.Xm/(1+((C.Xm/C.X0)-1)*exp(-C.Um*k))"

def dXdt(m):
    """Expression string for the logistic growth rate dX/dt = Um*X*(1 - X/Xm)."""
    return "C.Um*{0}*(1-{1}/C.Xm)".format(X(m), X(m))

def dHdT1(m):
    """Constant numerator of the dH/dT expression."""
    return ".62413*C.b*C.p"

def dHdT2(m):
    """(T + c)^2 factor of the dH/dT denominator."""
    return "(m.T[i,j,k]+C.c)**2"

def dHdT3(m):
    """Saturation-pressure-style factor d*exp(a - b/(T + c))."""
    return "C.d*exp(C.a-C.b/(m.T[i,j,k]+C.c))"

def dHdT4(m):
    """(p/Ps - 1)^2 factor, with Ps = dHdT3.

    BUG FIX: the inner term is now parenthesized so the generated string
    reads C.p/(C.d*exp(...)) instead of (C.p/C.d)*exp(...).  The previous
    unparenthesized form disagreed with the numeric implementation
    dHdt_vs_t, which computes C.p/RHS3 - 1 with RHS3 already evaluated.
    """
    return "(C.p/({0})-1)**2".format(dHdT3(m))

def dHdT(m):
    """Full dH/dT expression: dHdT1 / (dHdT2 * dHdT4 * dHdT3)."""
    return "{0}/({1}*{2}*{3})".format(dHdT1(m), dHdT2(m), dHdT4(m), dHdT3(m))

def Cpb(m):
    """Effective bed heat-capacity expression string."""
    return "(C.epi*C.rhoA*(C.Cpa+C.lmbda*{0}+(1-C.epi)*C.rhoS*C.Cps))/C.rhoB".format(dHdT(m))

def heRHS(m):
    """Accumulation term: rhoB * Cpb * dT/dt."""
    return "C.rhoB*{0}*m.dTdt[i,j,k]".format(Cpb(m))

def heLHS1(m):
    """Metabolic heat-generation term."""
    return "C.rhoS*(1-C.epi)*C.YQ*{0}".format(dXdt(m))

def heLHS2(m):
    """Sensible convective transport along z."""
    return "C.rhoA*C.Cpa*C.Vz*m.dTdz[i,j,k]"

def heLHS3(m):
    """Evaporative convective transport along z."""
    return "C.rhoA*C.lmbda*C.Vz*{0}*m.dTdz[i,j,k]".format(dHdT(m))

def heLHS4(m):
    """Axial conduction term."""
    return "C.Kb*m.d2Tdz2[i,j,k]"

def heLHS5(m):
    """Radial conduction terms; 1/r singularity regularized with a .01 offset."""
    return "C.Kb*m.dTdr[i,j,k]/(.01+j)+C.Kb*m.d2Tdr2[i,j,k]"
# NOTE(review): dead code — immediately shadowed by the redefinitions of
# _heateq below (the last definition wins).  If called it would fail anyway:
# the heLHS*/heRHS helpers return strings (string arithmetic is a TypeError)
# and f11 is not defined anywhere in this file.
def _heateq(m,i,j,k):
    return heLHS1(m)-heLHS2(m)-heLHS3(m)+heLHS4(m)+heLHS5(m) == f11(heRHS(m))
# NOTE(review): also dead code — shadowed by the next _heateq definition.
# Would raise TypeError if called (the helpers return strings, not Pyomo
# expressions).
def _heateq(m,i,j,k):
    return -heLHS3(m)+heLHS4(m)+heLHS5(m) == heRHS(m)
def _heateq(m,i,j,k):
    """Heat-equation constraint rule for node (z=i, r=j, t=k).

    Builds the whole equation as text from the string-builder helpers above
    and eval()s it so Pyomo receives a real relational expression.
    NOTE(review): eval of generated code is fragile — it relies on m, i, j,
    k, C and exp all being resolvable in this scope — and the print below
    fires once per discretized index, which is very noisy.
    """
    eqString=heLHS1(m)+"-"+heLHS2(m)+"-"+heLHS3(m)+"+"+heLHS4(m)+"+"+heLHS5(m)+"=="+heRHS(m)
    #eqString=heLHS1(m)+"-"+heLHS2(m)+"-"+heLHS3(m)+"+"+heLHS4(m)+"+"+heLHS5(m) +"=="+heRHS(m)
    #eqString=" m.d2Tdz2[i,j,k] + m.d2Tdr2[i,j,k] + m.u[i,j,k] == "+heRHS(m)
    #eqString=' m.d2Tdz2[i,j,k] + m.d2Tdr2[i,j,k] + m.u[i,j,k] == C.rhoB*(C.epi*C.rhoA*(C.Cpa+C.lmbda*.62413*C.b*C.p/((m.T[i,j,k]+C.c)**2*(C.p/C.d*exp(C.a-C.b/(m.T[i,j,k]+C.c))-1)**2*C.d*exp(C.a-C.b/(m.T[i,j,k]+C.c)))+(1-C.epi)*C.rhoS*C.Cps))/C.rhoB*m.dTdt[i,j,k]'
    print(eqString)  # debug dump of the generated equation
    #return m.d2Tdz2[i,j,k] + m.d2Tdr2[i,j,k] + m.u[i,j,k] == eval(heRHS(m))
    return eval(eqString)
#def _heateq(m,i,j,k):
    #return m.d2Tdx2[i,j,k] + m.d2Tdy2[i,j,k] + m.u[i,j,k] == m.dTdt[i,j,k]
# Attach the PDE at every (z, r, t) node.
m.heateq = Constraint(m.z,m.r,m.t,rule=_heateq)
# NOTE(review): the bare expression below is notebook residue — i, j, k are
# undefined at module level, so executing it raises NameError.  It only
# documents the discretized equation shape.
m.d2Tdz2[i,j,k] + m.d2Tdr2[i,j,k] + m.u[i,j,k] == C.rhoB*(C.epi*C.rhoA*(C.Cpa+C.lmbda*.62413*C.b*C.p/((m.T[i,j,k]+C.c)**2*(C.p/C.d*np.exp(C.a-C.b/(m.T[i,j,k]+C.c))-1)**2*C.d*np.exp(C.a-C.b/(m.T[i,j,k]+C.c)))+(1-C.epi)*C.rhoS*C.Cps))/C.rhoB*m.dTdt[i,j,k]
# NOTE(review): m.T was declared over (z, r, t) but these lambdas index it as
# (t, z, r) / (0, z, r) — the ordering looks inconsistent with the
# declaration; confirm which convention is intended.  Largely superseded by
# the explicit initT/zl/zu/rl/ru constraints below.
m.ic = Constraint(m.z,m.r, rule=lambda m, z, r: m.T[0,z,r] == 27)
m.bc1 = Constraint(m.t,m.r, rule=lambda m, t,r: m.dTdz[t,C.Z,r] == 0 if t > 0 else Constraint.Skip)
m.bc2 = Constraint(m.t,m.r, rule=lambda m, t,r: m.T[t,0,r] == C.Ta)
m.bc3 = Constraint(m.t,m.z, rule=lambda m, t,z: m.dTdz[t,z,0] == 0 if t > 0 else Constraint.Skip)
m.bc4 = Constraint(m.t,m.z, rule=lambda m, t,z: C.Kb*m.dTdr[t,z,C.R]+C.h*(m.T[t,z,C.R]-C.Tj)==0)
def _initT(m,i,j):
    """Initial condition: T(z=i, r=j, t=0) = T0."""
    return m.T[i,j,0] == C.T0
m.initT = Constraint(m.z,m.r,rule=_initT)
def _zl_bound(m,j,k):
    """Inlet boundary: T at z=0 equals the inlet air temperature Ta."""
    return m.T[0,j,k] == C.Ta
m.zl_bound = Constraint(m.r,m.t,rule=_zl_bound)
def _zu_bound(m,j,k):
    """Outlet boundary: zero axial gradient at z=Z."""
    return m.dTdz[C.Z,j,k] == 0
m.zu_bound = Constraint(m.r,m.t,rule=_zu_bound)
def _rl_bound(m,i,k):
    # NOTE(review): this uses the axial derivative dTdz at r=0; a radial
    # symmetry condition would normally use dTdr — confirm intent.
    return m.dTdz[i,0,k] == 0
m.rl_bound = Constraint(m.z,m.t,rule=_rl_bound)
def _ru_bound(m,i,k):
    """Wall boundary at r=R: conduction balances convective loss to the jacket (Tj)."""
    return C.Kb*m.dTdr[i,C.R,k]+C.h*(m.T[i,C.R,k]-C.Tj)==0
m.ru_bound = Constraint(m.z,m.t,rule=_ru_bound)
m.obj = Objective(expr=1)  # constant objective: this is a pure feasibility problem
# Discretize using Finite Difference Method
discretizer = TransformationFactory('dae.finite_difference')
discretizer.apply_to(m,nfe=20,wrt=m.z,scheme='CENTRAL')
discretizer.apply_to(m,nfe=20,wrt=m.r,scheme='CENTRAL')
discretizer.apply_to(m,nfe=20,wrt=m.t,scheme='CENTRAL')
solver=SolverFactory('ipopt')
results = solver.solve(m,tee=True)  # tee=True streams the ipopt log
# Collect the solution into a tidy frame; m.u is indexed (z, r, t), hence
# m.u[j,k,i] with i iterating time and j, k iterating z and r.
out=[]
for i in sorted(m.t):
    for j in sorted(m.z):
        for k in sorted(m.r):
            out.append([i,j,k,value(m.u[j,k,i])])
outDF=pd.DataFrame(out,columns=["t","z","r","u"])
outDF=outDF.sort_values(by="t")
# X vs t
def x_vs_t(t, Xm, Um, X0=None):
    """Logistic biomass curve X(t) = Xm / (1 + (Xm/X0 - 1) * exp(-Um*t)).

    t:  time (scalar or array — np.exp broadcasts)
    Xm: carrying capacity
    Um: specific growth rate
    X0: initial biomass; defaults to C.X0, so existing (t, Xm, Um) callers
        are unaffected (backward-compatible generalization).
    Returns None on failure (error printed — file-wide best-effort style).
    """
    try:
        if X0 is None:
            X0 = C.X0  # legacy behavior when not supplied
        return Xm/(1+((Xm/X0)-1)*np.exp(-Um*t))
    except Exception as e:
        print("'x_vs_t' "+"method execution Failed")  # fixed typo: "Falied" -> "Failed"
        print("Exception is ", str(e))
# dxdt vs time
def dXdt_vs_time(t, Xm, Um):
    """Logistic growth rate dX/dt = Um * X * (1 - X/Xm), X from x_vs_t.

    Returns None on failure (error printed).  Note that x_vs_t itself
    returns None on failure, which surfaces here as a TypeError and is
    caught by the same handler.
    """
    try:
        X1 = x_vs_t(t, Xm, Um)
        return Um*X1*(1-X1/Xm)
    except Exception as e:
        print("'dXdt_vs_time' "+"method execution Failed")  # fixed typo: "Falied" -> "Failed"
        print("Exception is ", str(e))
# dPdt vs time Generalise
def dPdt_vs_t(t, Yxp, Mp, Xm=183, Um=.33):
    """Product formation rate dP/dt = Yxp*dX/dt + Mp*X (Luedeking–Piret form).

    t:   time (scalar or array)
    Yxp: growth-associated yield coefficient
    Mp:  maintenance (non-growth) coefficient
    Xm, Um: logistic parameters forwarded to the biomass model.
    Returns None on failure (error printed).
    """
    try:
        return Yxp*dXdt_vs_time(t, Xm, Um) + Mp*x_vs_t(t, Xm, Um)
    except Exception as e:
        print("'dPdt_vs_t' "+"method execution Failed")  # fixed typo: "Falied" -> "Failed"
        print("Exception is ", str(e))
# dHdt vs time
def dHdt_vs_t(T):
    """dH/dT evaluated numerically at temperature T.

    Mirrors the string-built dHdT expression used by the Pyomo model:
    .62413*b*p / ((T+c)^2 * (p/Ps - 1)^2 * Ps), where
    Ps = d*exp(a - b/(T+c)) (a saturation-pressure-style term — constants
    come from the project config object C).
    Returns None on failure (error printed).
    """
    try:
        RHS1 = .62413*C.b*C.p
        RHS2 = (T+C.c)**2
        RHS3 = C.d*np.exp(C.a-C.b/(T+C.c))  # Ps term
        RHS4 = (C.p/RHS3-1)**2
        return RHS1/(RHS2*RHS4*RHS3)
    except Exception as e:
        print("'dHdt_vs_t' "+"method execution Failed")  # fixed typo: "Falied" -> "Failed"
        print("Exception is ", str(e))
# dTdz vs time
def dTdz_vs_t(outDF):
    """Approximate dT/dz per time step as (T(z_max) - T(z_min)) / z_max.

    Both temperature traces are taken along the outer radius
    (r == r.max()), yielding one value per time step.

    outDF: DataFrame with columns ['t', 'z', 'r', 'u'] (u holds temperature).
    Returns an np.ndarray, or None on failure (error printed).
    """
    try:
        r_outer = outDF["r"] == outDF["r"].max()
        tempZ = outDF[(outDF["z"] == outDF["z"].max()) & r_outer]['u']
        tempZ0 = outDF[(outDF["z"] == outDF["z"].min()) & r_outer]['u']
        return (np.array(tempZ)-np.array(tempZ0))/outDF["z"].max()
    except Exception as e:
        print("'dTdz_vs_t' "+"method execution Failed")  # fixed typo: "Falied" -> "Failed"
        print("Exception is ", str(e))
# Revap vs time # outDF ['t', 'z', 'r', 'u']
def Revap_vs_t(outDF):
    """Estimate the evaporation rate R_evap at each time step.

    Duplicate of the earlier definition in this file — this later one is
    the definition that wins at import time.  Combines air properties
    (C.rhoA, C.Vz, C.V), dT/dz and dH/dT at the mean temperature per step.

    outDF: DataFrame with columns ['t', 'z', 'r', 'u'].
    Returns an np.ndarray, or None on failure (error printed).
    """
    try:
        dhdt = [dHdt_vs_t(temp) for temp in outDF.groupby("t")["u"].mean().values.tolist()]
        return C.rhoA*C.Vz*C.V*dTdz_vs_t(outDF)*np.array(dhdt)
    except Exception as e:
        print("'Revap_vs_t' "+"method execution Failed")  # fixed typo: "Falied" -> "Failed"
        print("Exception is ", str(e))
# dBWdt vs time
def dBWdt_vs_t(outDF):
    """Bed-water rate dBW/dt: production minus evaporation loss per time step.

    Duplicate of the earlier definition — this later one wins at import
    time.  Precedence note: only the Revap term is divided by C.IDS.
    Returns an np.ndarray, or None on failure (error printed).
    """
    try:
        return np.array(dPdt_vs_t(outDF['t'].unique(),C.Yxw,C.Mw,C.Xm,C.Um))-np.array(Revap_vs_t(outDF))/C.IDS
    except Exception as e:
        print("'dBWdt_vs_t' "+"method execution Failed")  # fixed typo: "Falied" -> "Failed"
        print("Exception is ", str(e))
# dBWdt vs time
y=dBWdt_vs_t(outDF)  # computed once for interactive inspection, then recomputed below
plt.plot(outDF['t'].unique(),dBWdt_vs_t(outDF),label='Estimated dBW/dt')
plt.xlabel("Time (Hours)")
plt.ylabel("dBW/dt")
plt.title("dBW/dt' vs time")
plt.legend()
plt.show()
## equation
# NOTE(review): the five lines below are a broken notebook fragment — the
# trailing "==" leaves the expression unterminated (SyntaxError if executed
# as-is) and i, j, k are undefined at module level.  Kept only as a record
# of the discretized heat-equation shape.
m.d2Tdz2[i,j,k] + m.d2Tdr2[i,j,k] + m.u[i,j,k] ==
C.rhoB*(C.epi*C.rhoA*(C.Cpa+C.lmbda*.62413*C.b*C.p/((m.T[i,j,k]+C.c)**2*
(C.p/C.d*math.exp(C.a-C.b/(m.T[i,j,k]+C.c))-1)**2*
C.d*np.exp(C.a-C.b/(m.T[i,j,k]+C.c)))+(1-C.epi)*
C.rhoS*C.Cps))/C.rhoB*m.dTdt[i,j,k]
# Scratch cells: pandas boolean-indexing and exp() sanity checks.
data = {'a':[600, 600, 600, 600, 600, 600, 600], 'b': ['a', 'b', 'b', 'a', 'a', 'a', 'a']}
data = pd.DataFrame(data)
test = data.loc[(data['a'] > 0) & (data['b']=='a'), 'a']
eqString=' m.d2Tdz2[i,j,k] + m.d2Tdr2[i,j,k] + m.u[i,j,k] == C.rhoB*(C.epi*C.rhoA*(C.Cpa+C.lmbda*.62413*C.b*C.p/((m.T[i,j,k]+C.c)**2*(C.p/C.d*math.exp(C.a-C.b/(m.T[i,j,k]+C.c))-1)**2*C.d*math.exp(C.a-C.b/(m.T[i,j,k]+C.c)))+(1-C.epi)*C.rhoS*C.Cps))/C.rhoB*m.dTdt[i,j,k]'
np.exp(test)
x2=5
np.exp([1,2])
math.exp(2)
df3=outDF[outDF.t==0]
df3.columns
pivot = df3.pivot_table(index=['z'], values=['r'], aggfunc='max')
| 29.897881
| 269
| 0.618902
| 6,106
| 31,034
| 3.085326
| 0.074353
| 0.016561
| 0.010829
| 0.018685
| 0.861564
| 0.823611
| 0.803705
| 0.776634
| 0.748925
| 0.737141
| 0
| 0.044719
| 0.139653
| 31,034
| 1,037
| 270
| 29.926712
| 0.660861
| 0
| 0
| 0.65488
| 0
| 0.009901
| 0.109408
| 0.046478
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.07355
| null | null | 0.048091
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ce9d90709d88fbafeda91829b7281a0835d3dc0
| 210,170
|
py
|
Python
|
desktop/libs/dashboard/src/dashboard/tests.py
|
linwukang/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 1
|
2019-06-23T13:22:07.000Z
|
2019-06-23T13:22:07.000Z
|
desktop/libs/dashboard/src/dashboard/tests.py
|
zks888/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 4
|
2021-03-11T04:02:00.000Z
|
2022-03-27T08:31:56.000Z
|
desktop/libs/dashboard/src/dashboard/tests.py
|
zks888/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 1
|
2017-11-09T09:31:28.000Z
|
2017-11-09T09:31:28.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from django.contrib.auth.models import User
from django.urls import reverse
from nose.tools import assert_true, assert_false, assert_equal, assert_not_equal
from desktop.lib.django_test_util import make_logged_in_client
from desktop.lib.test_utils import grant_access
from desktop.lib.rest import resource
from desktop.models import Document2
from dashboard.facet_builder import _round_number_range
from dashboard.models import Collection2, augment_response
from dashboard.controller import DashboardController
QUERY = {'qs': [{'q': ''}], 'fqs': [], 'start': 0}
def test_ranges():
  """_round_number_range() rounds a value to a human-friendly bucket."""
  # (input, expected bucket) pairs, checked in the original order.
  cases = [
    (99, (90, 100)),
    (100, (0, 100)),
    (101, (0, 100)),
    (9045352, (8000000, 9000000)),
  ]
  for value, expected in cases:
    assert_equal(expected, _round_number_range(value))
class MockResource():
  """Stand-in for desktop.lib.rest.resource.Resource used by these tests.

  A canned Solr response is installed with set_solr_response(); GET requests
  for the known admin URLs return fixed schema/collection payloads, and
  every other GET returns the canned response.
  """
  RESPONSE = None

  def __init__(self, client):
    pass

  @classmethod
  def set_solr_response(cls, response):
    MockResource.RESPONSE = response

  def invoke(self, method, *args, **kwargs):
    # Dispatch on the HTTP verb, case-insensitively.
    verb = method.lower()
    if verb == 'head':
      return self.head(*args, **kwargs)
    if verb == 'get':
      return self.get(*args, **kwargs)
    raise Exception('do not know how to handle %s' % method)

  def head(self, *args, **kwargs):
    return ''

  def get(self, *args, **kwargs):
    url = args[0]
    if 'collection_1/admin/file' in url:
      return SOLR_SCHEMA
    if 'collection_1/admin/luke' in url:
      # luke with show=schema returns the schema variant
      if ('show', 'schema') in kwargs['params']:
        return SOLR_LUKE_SCHEMA
      return SOLR_LUKE_
    if 'admin/collections' in url:
      return {'collections': ['collection_1'], 'aliases': []}
    return MockResource.RESPONSE
class TestSearchBase(object):
  """Shared test fixture: two logged-in users with dashboard/desktop access
  and the REST layer monkey-patched to the Solr mock (no real Solr needed)."""

  def setUp(self):
    # Owner client and an unrelated user's client.
    self.c = make_logged_in_client(username='test_dashboard', is_superuser=False)
    self.client_not_me = make_logged_in_client(username="not_perm_user", groupname="default", recreate=True, is_superuser=False)
    self.user = User.objects.get(username='test_dashboard')
    self.user_not_me = User.objects.get(username="not_perm_user")
    # Both users need the dashboard and desktop applications.
    grant_access('test_dashboard', 'test_dashboard', 'dashboard')
    grant_access(self.user.username, self.user.username, "desktop")
    grant_access('not_perm_user', 'not_perm_user', 'dashboard')
    grant_access(self.user_not_me.username, self.user_not_me.username, "desktop")
    self.home_dir = Document2.objects.get_home_directory(user=self.user)
    # Monkey-patch the REST resource with the Solr mock; restored in tearDown.
    self.prev_resource = resource.Resource
    resource.Resource = MockResource
    self.collection = Collection2(user=self.user, name='collection_1')
    # Default canned Solr response: one document with a multi-valued title.
    MockResource.set_solr_response("""{
  "responseHeader": {
    "status": 0,
    "QTime": 0,
    "params": {
      "indent": "true",
      "q": "*:*",
      "_": "1442953203972",
      "wt": "json"
    }
  },
  "response": {
    "numFound": 1,
    "start": 0,
    "docs": [
      {
        "id": "change.me",
        "title": [
          "val1",
          "val2",
          "[val3]",
          "val4"
        ],
        "_version_": 1513046095083602000
      }
    ]
  }
}""")

  def tearDown(self):
    # Remove monkey patching
    resource.Resource = self.prev_resource
class TestWithMockedSolr(TestSearchBase):
def _get_collection_param(self, collection):
col_json = json.loads(collection.get_json(self.user))
return col_json['collection']
def test_index(self):
response = self.c.get(reverse('dashboard:index'))
assert_true('dashboard' in response.content, response.content)
  def test_share_dashboard(self):
    """End-to-end document sharing: read-only share in each direction and the
    resulting owner/shared collection counts from DashboardController."""
    doc = Document2.objects.create(name='test_dashboard', type='search-dashboard', owner=self.user,
        data=self.collection.data, parent_directory=self.home_dir)

    # owner can view document
    response = self.c.get('/desktop/api2/doc/', {'uuid': doc.uuid})
    data = json.loads(response.content)
    assert_equal(doc.uuid, data['document']['uuid'], data)

    # other user cannot view document
    response = self.client_not_me.get('/desktop/api2/doc/', {'uuid': doc.uuid})
    data = json.loads(response.content)
    assert_equal(-1, data['status'])

    # There are no collections with user_not_me
    controller = DashboardController(self.user_not_me)
    hue_collections = controller.get_search_collections()
    assert_true(len(hue_collections) == 0)

    # Share read perm by users (owner grants read to both users; write to none)
    response = self.c.post("/desktop/api2/doc/share", {
        'uuid': json.dumps(doc.uuid),
        'data': json.dumps({
            'read': {
                'user_ids': [
                    self.user.id,
                    self.user_not_me.id
                ],
                'group_ids': [],
            },
            'write': {
                'user_ids': [],
                'group_ids': [],
            }
        })
    })
    assert_equal(0, json.loads(response.content)['status'], response.content)
    # Owner keeps write; the other user gets read-only.
    assert_true(doc.can_read(self.user))
    assert_true(doc.can_write(self.user))
    assert_true(doc.can_read(self.user_not_me))
    assert_false(doc.can_write(self.user_not_me))

    # other user can view document
    response = self.client_not_me.get('/desktop/api2/doc/', {'uuid': doc.uuid})
    data = json.loads(response.content)
    assert_equal(doc.uuid, data['document']['uuid'], data)

    # other user can open dashboard
    response = self.c.post(reverse('dashboard:search'), {
        'collection': json.dumps(self._get_collection_param(self.collection)),
        'query': json.dumps(QUERY)
    })
    data = json.loads(response.content)
    assert_true('response' in data, data)
    assert_true('docs' in data['response'], data)

    # For self.user_not_me: sees the shared doc, owns nothing, shared nothing
    controller = DashboardController(self.user_not_me)
    hue_collections = controller.get_search_collections()
    assert_equal(len(hue_collections), 1)
    assert_equal(hue_collections[0].name, 'test_dashboard')
    hue_collections = controller.get_owner_search_collections()
    assert_equal(len(hue_collections), 0)
    hue_collections = controller.get_shared_search_collections()
    assert_equal(len(hue_collections), 0)

    # For self.user: owner of the doc, which is also counted as shared out
    controller = DashboardController(self.user)
    hue_collections = controller.get_search_collections()
    assert_equal(len(hue_collections), 1)
    assert_equal(hue_collections[0].name, 'test_dashboard')
    hue_collections = controller.get_owner_search_collections()
    assert_equal(len(hue_collections), 1)
    assert_equal(hue_collections[0].name, 'test_dashboard')
    hue_collections = controller.get_shared_search_collections()
    assert_equal(len(hue_collections), 1)
    assert_equal(hue_collections[0].name, 'test_dashboard')

    # Now the reverse direction: user_not_me creates and shares a dashboard.
    user_not_me_home_dir = Document2.objects.get_home_directory(user=self.user_not_me)
    doc1 = Document2.objects.create(name='test_dashboard1', type='search-dashboard', owner=self.user_not_me,
        data=self.collection.data, parent_directory=user_not_me_home_dir)

    # self.user_not_me can view document
    response = self.client_not_me.get('/desktop/api2/doc/', {'uuid': doc1.uuid})
    data = json.loads(response.content)
    assert_equal(doc1.uuid, data['document']['uuid'], data)

    # self.user cannot view document
    response = self.c.get('/desktop/api2/doc/', {'uuid': doc1.uuid})
    data = json.loads(response.content)
    assert_equal(-1, data['status'])

    # Share read perm by users
    response = self.client_not_me.post("/desktop/api2/doc/share", {
        'uuid': json.dumps(doc1.uuid),
        'data': json.dumps({
            'read': {
                'user_ids': [
                    self.user.id,
                ],
                'group_ids': [],
            },
            'write': {
                'user_ids': [],
                'group_ids': [],
            }
        })
    })
    assert_equal(0, json.loads(response.content)['status'], response.content)
    assert_true(doc1.can_read(self.user))
    assert_false(doc1.can_write(self.user))
    assert_true(doc1.can_read(self.user_not_me))
    assert_true(doc1.can_write(self.user_not_me))

    # For self.user_not_me: two visible, owns one, shares one
    controller = DashboardController(self.user_not_me)
    hue_collections = controller.get_search_collections()
    assert_equal(len(hue_collections), 2)
    hue_collections = controller.get_owner_search_collections()
    assert_equal(len(hue_collections), 1)
    assert_equal(hue_collections[0].name, 'test_dashboard1')
    hue_collections = controller.get_shared_search_collections()
    assert_equal(len(hue_collections), 1)
    assert_equal(hue_collections[0].name, 'test_dashboard1')

    # For self.user: two visible, owns one, shares one
    controller = DashboardController(self.user)
    hue_collections = controller.get_search_collections()
    assert_equal(len(hue_collections), 2)
    hue_collections = controller.get_owner_search_collections()
    assert_equal(len(hue_collections), 1)
    assert_equal(hue_collections[0].name, 'test_dashboard')
    hue_collections = controller.get_shared_search_collections()
    assert_equal(len(hue_collections), 1)
    assert_equal(hue_collections[0].name, 'test_dashboard')
def test_update_document(self):
# Regular user
response = self.c.post(reverse('dashboard:update_document'), {
'collection': json.dumps(self._get_collection_param(self.collection)),
'document': json.dumps({'hasChanged': False})
})
data = json.loads(response.content)
assert_equal(0, data['status'], response.content)
assert_true('no modifications to change' in data['message'], response.content)
# Admin
c = make_logged_in_client(username='admin', is_superuser=True, recreate=True)
response = c.post(reverse('dashboard:update_document'), {
'collection': json.dumps(self._get_collection_param(self.collection)),
'document': json.dumps({'hasChanged': False})
})
data = json.loads(response.content)
assert_equal(0, data['status'], response.content)
assert_true('no modifications to change' in data['message'], response.content)
def test_strip_nulls(self):
response = '{"uid":"1111111","method":"check_user"}\x00'
response = json.loads(response.replace('\x00', '')) # Does not call real API
  def test_convert_schema_fields_to_luke(self):
    """Collection2._make_luke_from_schema_fields() must map a Solr schema
    field list to luke-style field descriptors.

    NOTE(review): sorted() over a list of dicts relies on Python 2 ordering
    semantics (dicts are not orderable in Python 3) — confirm before porting.
    """
    schema_fields = {u'fields': [
        {u'indexed': True, u'stored': True, u'type': u'long', u'name': u'_version_'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'tdate', u'name': u'created_at'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'string', u'name': u'expanded_url'},
        {u'uniqueKey': True, u'name': u'id', u'required': True, u'stored': True, u'indexed': True, u'type': u'tlong'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'tlong', u'name': u'in_reply_to_status_id'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'tint', u'name': u'in_reply_to_user_id'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'string', u'name': u'media_url_https'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'tint', u'name': u'retweet_count'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'text_general', u'name': u'source'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'text_general', u'name': u'text'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'tint', u'name': u'user_followers_count'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'tint', u'name': u'user_friends_count'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'string', u'name': u'user_location'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'text_general', u'name': u'user_name'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'string', u'name': u'user_screen_name'},
        {u'indexed': True, u'stored': True, u'required': True, u'type': u'tint', u'name': u'user_statuses_count'}
      ], u'responseHeader': {u'status': 0, u'QTime': 1}
    }

    # Expected luke descriptors, sorted; only the uniqueKey field carries
    # uniqueKey=True.
    assert_equal([
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'long', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'string', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'string', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'string', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'string', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'tdate', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'text_general', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'text_general', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'text_general', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'tint', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'tint', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'tint', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'tint', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'tint', u'copyDests': []},
        {'uniqueKey': None, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'tlong', u'copyDests': []},
        {'uniqueKey': True, 'copySources': [], 'flags': u'I-S-----OF-----l', 'required': True, 'type': u'tlong', u'copyDests': []}
      ],
      sorted(Collection2._make_luke_from_schema_fields(schema_fields).values())
    )
  def test_response_escaping_multi_value(self):
    """Multi-valued fields must pass through the search endpoint verbatim,
    including markup-like values such as a <script> tag inside a list."""
    MockResource.set_solr_response("""{
  "responseHeader": {
    "status": 0,
    "QTime": 0,
    "params": {
      "indent": "true",
      "q": "*:*",
      "_": "1442953203972",
      "wt": "json"
    }
  },
  "response": {
    "numFound": 1,
    "start": 0,
    "docs": [
      {
        "id": "change.me",
        "title": [
          "val1",
          "val2",
          "[<script>alert(123)</script>]",
          "val4"
        ],
        "_version_": 1513046095083602000
      }
    ]
  }
}""")
    response = self.c.post(reverse('dashboard:search'), {
        'collection': json.dumps(self._get_collection_param(self.collection)),
        'query': json.dumps(QUERY)
    })
    result = json.loads(response.content)
    # The doc comes back augmented (hueId/details/externalLink added) with
    # the title values unescaped and in order.
    assert_equal(
      [{'hueId': 'change.me', 'id': 'change.me', '_version_': 1513046095083602000, 'title': ['val1', 'val2', '[<script>alert(123)</script>]', 'val4'], 'details': [], 'externalLink': None}],
      result['response']['docs']
    )
  def test_response_with_facets(self):
    """A faceted Solr response must flow through the search endpoint with
    field and date facets intact and without the error banner."""
    MockResource.set_solr_response("""{"responseHeader":{"status":0,"QTime":59,"params":{"facet":"true","facet.mincount":"1","facet.limit":"100","facet.date":"article_date","f.article_date.facet.date.start":"NOW-7MONTH/DAYS","wt":"json","rows":"15","user.name":"hue","start":"0","facet.sort":"count","q":"*:*","f.article_date.facet.date.end":"NOW-5MONTH","doAs":"romain","f.article_date.facet.date.gap":"+1DAYS","facet.field":["journal_title","author_facet"],"fq":["article_date:[2013-06-13T00:00:00Z TO 2013-06-13T00:00:00Z+1DAYS]","journal_title:\\"in\\""]}},"response":{"numFound":4,"start":0,"maxScore":1.0,"docs":[{"article_title":"Investigations for neonatal seizures.","journal_issn":"1878-0946","article_abstract_text":["Seizures during the neonatal period are always medical emergencies. Apart from the need for rapid anticonvulsive treatment, the underlying condition is often not immediately obvious. In the search for the correct diagnosis, a thorough history, clinical examination, laboratory work-up, neurophysiological and neuroradiological investigations are all essential. A close collaboration between neonatologists, neuropaediatricians, laboratory specialists, neurophysiologists and radiologists facilitates the adequate care of the infant."],"ontologies":["36481|1 "],"article_date":"2013-06-13T00:00:00Z","journal_title":"Seminars in fetal & neonatal medicine","date_created":"2013-08-22T00:00:00Z","journal_country":"Netherlands","journal_iso_abbreviation":"Semin Fetal Neonatal Med","id":"23680099","author":["B B Hallberg","M M Blennow"],"article_pagination":"196-201","journal_publication_date":"2013-08-22T00:00:00Z","affiliation":"Department of Neonatology, Karolinska Institutet and University Hospital, Stockholm, Sweden. 
boubou.hallberg@ki.se","language":"eng","_version_":1450807641462800385},{"article_title":"Enantiomeric selection properties of β-homoDNA: enhanced pairing for heterochiral complexes.","journal_issn":"1521-3773","article_date":"2013-06-13T00:00:00Z","journal_title":"Angewandte Chemie (International ed. in English)","date_created":"2013-07-20T00:00:00Z","journal_country":"Germany","journal_iso_abbreviation":"Angew. Chem. Int. Ed. Engl.","id":"23670912","author":["Daniele D D'Alonzo","Jussara J Amato","Guy G Schepers","Matheus M Froeyen","Arthur A Van Aerschot","Piet P Herdewijn","Annalisa A Guaragna"],"article_pagination":"6662-5","journal_publication_date":"2013-06-24T00:00:00Z","affiliation":"Dipartimento di Scienze Chimiche, Università degli Studi di Napoli Federico II, Via Cintia 21, 80126 Napoli, Italy. dandalonzo@unina.it","language":"eng","_version_":1450807661929955329},{"article_title":"Interference of bacterial cell-to-cell communication: a new concept of antimicrobial chemotherapy breaks antibiotic resistance.","journal_issn":"1664-302X","article_abstract_text":["Bacteria use a cell-to-cell communication activity termed \\"quorum sensing\\" to coordinate group behaviors in a cell density dependent manner. Quorum sensing influences the expression profile of diverse genes, including antibiotic tolerance and virulence determinants, via specific chemical compounds called \\"autoinducers\\". During quorum sensing, Gram-negative bacteria typically use an acylated homoserine lactone (AHL) called autoinducer 1. Since the first discovery of quorum sensing in a marine bacterium, it has been recognized that more than 100 species possess this mechanism of cell-to-cell communication. In addition to being of interest from a biological standpoint, quorum sensing is a potential target for antimicrobial chemotherapy. This unique concept of antimicrobial control relies on reducing the burden of virulence rather than killing the bacteria. 
It is believed that this approach will not only suppress the development of antibiotic resistance, but will also improve the treatment of refractory infections triggered by multi-drug resistant pathogens. In this paper, we review and track recent progress in studies on AHL inhibitors/modulators from a biological standpoint. It has been discovered that both natural and synthetic compounds can disrupt quorum sensing by a variety of means, such as jamming signal transduction, inhibition of signal production and break-down and trapping of signal compounds. We also focus on the regulatory elements that attenuate quorum sensing activities and discuss their unique properties. Understanding the biological roles of regulatory elements might be useful in developing inhibitor applications and understanding how quorum sensing is controlled."],"ontologies":["2402|1 ","1875|1 ","2047|3 ","36690|1 ","8120|1 ","1872|1 ","1861|1 ","1955|2 ","38027|1 ","3853|1 ","2237|3 ","37074|1 ","3043|2 ","36478|1 ","4403|1 ","2751|1 ","10751|1 ","36467|1 ","2387|1 ","7278|3 ","3826|1 "],"article_date":"2013-06-13T00:00:00Z","journal_title":"Frontiers in microbiology","date_created":"2013-06-30T00:00:00Z","journal_country":"Switzerland","journal_iso_abbreviation":"Front Microbiol","id":"23720655","author":["Hidetada H Hirakawa","Haruyoshi H Tomita"],"article_pagination":"114","journal_publication_date":"2013-09-13T00:00:00Z","affiliation":"Advanced Scientific Research Leaders Development Unit, Gunma University Maebashi, Gunma, Japan.","language":"eng","_version_":1450807662055784448},{"article_title":"The role of musical training in emergent and event-based timing.","journal_issn":"1662-5161","article_abstract_text":["Introduction: Musical performance is thought to rely predominantly on event-based timing involving a clock-like neural process and an explicit internal representation of the time interval. 
Some aspects of musical performance may rely on emergent timing, which is established through the optimization of movement kinematics, and can be maintained without reference to any explicit representation of the time interval. We predicted that musical training would have its largest effect on event-based timing, supporting the dissociability of these timing processes and the dominance of event-based timing in musical performance. Materials and Methods: We compared 22 musicians and 17 non-musicians on the prototypical event-based timing task of finger tapping and on the typically emergently timed task of circle drawing. For each task, participants first responded in synchrony with a metronome (Paced) and then responded at the same rate without the metronome (Unpaced). Results: Analyses of the Unpaced phase revealed that non-musicians were more variable in their inter-response intervals for finger tapping compared to circle drawing. Musicians did not differ between the two tasks. Between groups, non-musicians were more variable than musicians for tapping but not for drawing. We were able to show that the differences were due to less timer variability in musicians on the tapping task. Correlational analyses of movement jerk and inter-response interval variability revealed a negative association for tapping and a positive association for drawing in non-musicians only. Discussion: These results suggest that musical training affects temporal variability in tapping but not drawing. Additionally, musicians and non-musicians may be employing different movement strategies to maintain accurate timing in the two tasks. 
These findings add to our understanding of how musical training affects timing and support the dissociability of event-based and emergent timing modes."],"ontologies":["36810|1 ","49002|1 ","3132|1 ","3797|1 ","37953|1 ","36563|2 ","524|1 ","3781|1 ","2848|1 ","17163|1 ","17165|1 ","49010|1 ","36647|3 ","36529|1 ","2936|1 ","2643|1 ","714|1 ","3591|1 ","2272|1 ","3103|1 ","2265|1 ","37051|1 ","3691|1 "],"article_date":"2013-06-14T00:00:00Z","journal_title":"Frontiers in human neuroscience","date_created":"2013-06-29T00:00:00Z","journal_country":"Switzerland","journal_iso_abbreviation":"Front Hum Neurosci","id":"23717275","author":["L H LH Baer","J L N JL Thibodeau","T M TM Gralnick","K Z H KZ Li","V B VB Penhune"],"article_pagination":"191","journal_publication_date":"2013-09-13T00:00:00Z","affiliation":"Department of Psychology, Centre for Research in Human Development, Concordia University Montréal, QC, Canada.","language":"eng","_version_":1450807667479019520}]},"facet_counts":{"facet_queries":{},"facet_fields":{"journal_title":["in",4,"frontiers",2,"angewandte",1,"chemie",1,"ed",1,"english",1,"fetal",1,"human",1,"international",1,"medicine",1,"microbiology",1,"neonatal",1,"neuroscience",1,"seminars",1],"author_facet":["Annalisa A Guaragna",1,"Arthur A Van Aerschot",1,"B B Hallberg",1,"Daniele D D'Alonzo",1,"Guy G Schepers",1,"Haruyoshi H Tomita",1,"Hidetada H Hirakawa",1,"J L N JL Thibodeau",1,"Jussara J Amato",1,"K Z H KZ Li",1,"L H LH Baer",1,"M M Blennow",1,"Matheus M Froeyen",1,"Piet P Herdewijn",1,"T M TM Gralnick",1,"V B VB Penhune",1]},"facet_dates":{"article_date":{"gap":"+1DAYS","start":"2013-04-27T00:00:00Z","end":"2013-06-28T00:00:00Z"}},"facet_ranges":{}},"highlighting":{"23680099":{},"23670912":{},"23720655":{},"23717275":{}},"spellcheck":{"suggestions":["correctlySpelled",false]}}""")

    # journal_title facet + date range article_date facets clicked and author_facet not clicked
    # http://solr:8983/solr/articles/select?user.name=hue&doAs=romain&q=%2A%3A%2A&wt=json&rows=15&start=0&facet=true&facet.mincount=1&facet.limit=100&facet.sort=count&facet.field=journal_title&facet.field=author_facet&facet.date=article_date&f.article_date.facet.date.start=NOW-7MONTH%2FDAYS&f.article_date.facet.date.end=NOW-5MONTH&f.article_date.facet.date.gap=%2B1DAYS&fq=article_date%3A%5B2013-06-13T00%3A00%3A00Z+TO+2013-06-13T00%3A00%3A00Z%2B1DAYS%5D&fq=journal_title%3A%22in%22
    response = self.c.post(reverse('dashboard:search'), {
        'collection': json.dumps(self._get_collection_param(self.collection)),
        'query': json.dumps(QUERY)
    })
    assert_false('alert alert-error' in response.content, response.content)
    assert_true('author_facet' in response.content, response.content)
    assert_true('Annalisa A Guaragna' in response.content, response.content)
    assert_true('journal_title' in response.content, response.content)
    assert_true('Angewandte' in response.content, response.content)
    assert_true('"numFound": 4' in response.content, response.content)
def test_response_highlighting_with_binary_value(self):
# Regression test: the mocked Solr response below has document ids containing
# raw control/binary bytes (already shown here as replacement characters),
# which also appear as keys of the "highlighting" section.  Rendering such a
# response used to fail with an ascii codec encode error.
MockResource.set_solr_response("""{"responseHeader":{"status":0,"QTime":23,"params":{"hl.fragsize":"1000","fl":"*","hl.snippets":"5","start":"0","user.name":"hue","q":"*:*","doAs":"romain","hl.fl":"*","wt":"json","hl":"true","rows":"2"}},"response":{"numFound":494,"start":0,"docs":[{"id":"#31;�#8;w)�U#3;333320442�#2;�#27;�v","last_name":"Ogh","gpa":"3.88","first_name":"Eirjish","age":"12","_version_":1508697786597507072},{"id":"#31;�#8;w)�U#3;344�457�4�#2;r��","last_name":"Ennjth","gpa":"1.22","first_name":"Oopob","age":"14","_version_":1508697786815610880}]},"facet_counts":{"facet_queries":{},"facet_fields":{"id":["31",485,"8",485,"u",485,"2",461,"x",308,"w",145,"3",123,"4",90,"3;3",81,"0",76,"y",46,"41",15,"16",14,"42",14,"05",12,"7",12,"04",11,"15",11,"3;31",11,"44",11,"45",11,"i",11,"n",11,"s",11,"03",10,"07",10,"11",10,"28",10,"30",10,"3;34",10,"46",10,"a",10,"c",10,"j",10,"v",10,"02",9,"1",9,"26",9,"6",9,"e",9,"f",9,"p",9,"z",9,"00",8,"06",8,"14",8,"43",8,"g",8,"h",8,"r",8,"20",7,"23",7,"29",7,"3;37",7,"40",7,"k",7,"01",6,"17",6,"22",6,"24",6,"27",6,"3;35",6,"3;36",6,"b",6,"12",5,"19",5,"21",5,"3;323",5,"3;33",5,"47",5,"5",5,"o",5,"18",4,"25",4,"2;6",4,"3;32",4,"3;360",4,"3;372",4,"d",4,"q",4,"t",4,"005",3,"2;3",3,"3;311",3,"3;343",3,"3;344",3,"3;373",3,"420",3,"471",3,"9",3,"l",3,"m",3,"0147",2,"020",2,"022",2,"031",2,"065",2,"070",2,"2;0",2,"2;5",2],"first_name":["unt",3,"at",2,"aut",2,"eigh",2,"jh",2,"jir",2,"jz",2,"oim",2,"oith",2,"onn",2,"ouz",2,"um",2,"veitt",2,"16",1,"21",1,"28",1,"30",1,"achunn",1,"ad",1,"agauz",1,"agur",1,"aibenn",1,"aich",1,"aichaum",1,"aigh",1,"aim",1,"aimoob",1,"ainn",1,"aipf",1,"aipfouv",1,"aisainn",1,"aistjs",1,"aith",1,"aitoum",1,"aittool",1,"aittoupf",1,"aiw",1,"ak",1,"al",1,"apf",1,"astjist",1,"ataiv",1,"att",1,"auchav",1,"auchib",1,"auchih",1,"aud",1,"audaush",1,"auh",1,"auhour",1,"aum",1,"aunnoiss",1,"aunopf",1,"aupev",1,"aus",1,"ausaust",1,"austour",1,"ausyv",1,"auth",1,"authep",1,"auttjich",1,"auttjir",1,"av",1,"besooz",1,
"bjfautt",1,"bjichaub",1,"bjittyl",1,"bjtoopf",1,"bleiss",1,"blistoot",1,"blittaub",1,"bljip",1,"bljir",1,"bloich",1,"bluhaid",1,"bluth",1,"breirjd",1,"breiter",1,"breitt",1,"breth",1,"brjishaip",1,"broil",1,"broopfoul",1,"brooputt",1,"brooroog",1,"brot",1,"brych",1,"brykaub",1,"brypfop",1,"bunn",1,"byroigh",1,"c",1,"caugh",1,"cautt",1,"chaittoif",1,"chaupour",1,"chautoonn",1,"chech",1,"cheigh",1,"chet",1],"last_name":["it",3,"ooz",3,"yss",3,"aih",2,"aim",2,"ash",2,"foum",2,"ig",2,"jch",2,"jif",2,"jis",2,"jiv",2,"jiw",2,"js",2,"oh",2,"ouf",2,"uch",2,"ud",2,"uf",2,"ul",2,"ush",2,"ys",2,"ab",1,"ach",1,"afoust",1,"aghaush",1,"aib",1,"aihjiss",1,"aimoint",1,"ain",1,"aineip",1,"ainn",1,"aint",1,"aintuf",1,"aipfes",1,"aipfjf",1,"air",1,"aish",1,"aishoott",1,"aishutt",1,"aisjnn",1,"aisseih",1,"aissutt",1,"aistaif",1,"aith",1,"aithjib",1,"aiv",1,"aiw",1,"aiz",1,"aizyb",1,"alyk",1,"ap",1,"apf",1,"apount",1,"assyv",1,"ast",1,"at",1,"atook",1,"att",1,"audal",1,"aug",1,"auk",1,"auloost",1,"aupfoitt",1,"aupjish",1,"aur",1,"aus",1,"authood",1,"auttyst",1,"auvjb",1,"auvon",1,"auzigh",1,"az",1,"besh",1,"birus",1,"bjit",1,"bjz",1,"blaich",1,"blaipf",1,"bleiz",1,"blikjigh",1,"bloob",1,"blouth",1,"boobjist",1,"boontoih",1,"boub",1,"bouch",1,"braul",1,"braut",1,"breinnyz",1,"brishoog",1,"brithith",1,"brjint",1,"brjth",1,"brubeist",1,"brugh",1,"bryvaip",1,"byl",1,"caleid",1,"ceir",1],"age":["12",60,"18",57,"14",56,"10",54,"11",53,"13",52,"16",50,"15",49,"17",44],"gpa":["2.34",6,"1.01",5,"1.43",5,"3.04",5,"3.14",5,"3.17",5,"3.87",5,"1.61",4,"2.24",4,"2.73",4,"2.76",4,"2.97",4,"3.28",4,"3.29",4,"3.35",4,"3.39",4,"3.67",4,"3.78",4,"3.85",4,"1.05",3,"1.1",3,"1.13",3,"1.22",3,"1.25",3,"1.3",3,"1.34",3,"1.37",3,"1.38",3,"1.39",3,"1.4",3,"1.44",3,"1.46",3,"1.53",3,"1.54",3,"1.55",3,"1.67",3,"1.72",3,"1.82",3,"1.91",3,"1.93",3,"11.0",3,"2.09",3,"2.11",3,"2.23",3,"2.26",3,"2.29",3,"2.46",3,"2.62",3,"2.71",3,"2.78",3,"2.79",3,"2.83",3,"2.84",3,"2.85",3,"2.92",3,"3.09",3,"3.11",3,"3.13",3,"3.23",3
,"3.44",3,"3.76",3,"3.82",3,"3.88",3,"3.89",3,"3.92",3,"3.97",3,"4.0",3,"1.02",2,"1.11",2,"1.23",2,"1.26",2,"1.28",2,"1.35",2,"1.48",2,"1.56",2,"1.59",2,"1.63",2,"1.79",2,"1.8",2,"1.81",2,"1.97",2,"16.0",2,"2.01",2,"2.03",2,"2.05",2,"2.08",2,"2.12",2,"2.14",2,"2.17",2,"2.2",2,"2.25",2,"2.3",2,"2.35",2,"2.36",2,"2.41",2,"2.47",2,"2.49",2,"2.51",2,"2.54",2,"2.56",2],"date1":[],"date2":[],"country":[],"state":[],"city":[],"latitude":[],"longitude":[]},"facet_dates":{},"facet_ranges":{},"facet_intervals":{}},"highlighting":{"#31;�#8;w)�U#3;333320442�#2;�#27;�v":{},"#31;�#8;w)�U#3;344�457�4�#2;r��":{}}}""")
# Run a search against the mocked backend.
response = self.c.post(reverse('dashboard:search'), {
'collection': json.dumps(self._get_collection_param(self.collection)),
'query': json.dumps(QUERY)
})
# No error banner rendered...
assert_false('alert alert-error' in response.content, response.content)
# ...and in particular no unicode encode error leaked into the page.
assert_false("'ascii' codec can't encode character u'\ufffd' in position" in response.content, response.content)
# A regular facet value from the mocked response is still displayed.
assert_true('bluhaid' in response.content, response.content)
def test_get_collection_fields(self):
# Luke handler response describing the index: per-field type, schema flags,
# topTerms and histograms, plus the flag legend under "info".
MockResource.set_solr_response("""{"responseHeader":{"status":0,"QTime":8},"index":{"numDocs":8,"maxDoc":8,"deletedDocs":0,"version":15,"segmentCount":5,"current":true,"hasDeletions":false,"directory":"org.apache.lucene.store.NRTCachingDirectory:NRTCachingDirectory(org.apache.solr.store.hdfs.HdfsDirectory@5efe087b lockFactory=org.apache.solr.store.hdfs.HdfsLockFactory@5106def2; maxCacheMB=192.0 maxMergeSizeMB=16.0)","userData":{"commitTimeMSec":"1389233070579"},"lastModified":"2014-01-09T02:04:30.579Z"},"fields":{"_version_":{"type":"long","schema":"ITS-----OF------","index":"-TS-------------","docs":8,"distinct":8,"topTerms":["1456716393276768256",1,"1456716398067712000",1,"1456716401465098240",1,"1460689159964327936",1,"1460689159981105152",1,"1460689159988445184",1,"1460689159993688064",1,"1456716273606983680",1],"histogram":["1",8]},"cat":{"type":"string","schema":"I-S-M---OF-----l","index":"ITS-----OF------","docs":4,"distinct":1,"topTerms":["currency",4],"histogram":["1",0,"2",0,"4",1]},"features":{"type":"text_general","schema":"ITS-M-----------","index":"ITS-------------","docs":4,"distinct":3,"topTerms":["coins",4,"notes",4,"and",4],"histogram":["1",0,"2",0,"4",3]},"id":{"type":"string","schema":"I-S-----OF-----l","index":"ITS-----OF------","docs":8,"distinct":8,"topTerms":["GBP",1,"NOK",1,"USD",1,"change.me",1,"change.me1",1,"change.me112",1,"change.me12",1,"EUR",1],"histogram":["1",8]},"inStock":{"type":"boolean","schema":"I-S-----OF-----l","index":"ITS-----OF------","docs":4,"distinct":1,"topTerms":["true",4],"histogram":["1",0,"2",0,"4",1]},"manu":{"type":"text_general","schema":"ITS-----O-------","index":"ITS-----O-------","docs":4,"distinct":7,"topTerms":["of",2,"bank",2,"european",1,"norway",1,"u.k",1,"union",1,"america",1],"histogram":["1",5,"2",2]},"manu_exact":{"type":"string","schema":"I-------OF-----l","index":"(unstored field)","docs":4,"distinct":4,"topTerms":["Bank of Norway",1,"European Union",1,"U.K.",1,"Bank of 
America",1],"histogram":["1",4]},"manu_id_s":{"type":"string","schema":"I-S-----OF-----l","dynamicBase":"*_s","index":"ITS-----OF------","docs":4,"distinct":4,"topTerms":["eu",1,"nor",1,"uk",1,"boa",1],"histogram":["1",4]},"name":{"type":"text_general","schema":"ITS-------------","index":"ITS-------------","docs":4,"distinct":6,"topTerms":["one",4,"euro",1,"krone",1,"dollar",1,"pound",1,"british",1],"histogram":["1",5,"2",0,"4",1]},"price_c":{"type":"currency","schema":"I-S------F------","dynamicBase":"*_c"},"price_c____amount_raw":{"type":"amount_raw_type_tlong","schema":"IT------O-------","dynamicBase":"*____amount_raw","index":"(unstored field)","docs":4,"distinct":8,"topTerms":["0",4,"0",4,"0",4,"0",4,"0",4,"0",4,"0",4,"100",4],"histogram":["1",0,"2",0,"4",8]},"price_c____currency":{"type":"currency_type_string","schema":"I-------O-------","dynamicBase":"*____currency","index":"(unstored field)","docs":4,"distinct":4,"topTerms":["GBP",1,"NOK",1,"USD",1,"EUR",1],"histogram":["1",4]},"romain_t":{"type":"text_general","schema":"ITS-------------","dynamicBase":"*_t","index":"ITS-------------","docs":1,"distinct":1,"topTerms":["true",1],"histogram":["1",1]},"text":{"type":"text_general","schema":"IT--M-----------","index":"(unstored field)","docs":8,"distinct":21,"topTerms":["and",4,"currency",4,"notes",4,"one",4,"coins",4,"bank",2,"of",2,"change.me112",1,"change.me1",1,"change.me",1],"histogram":["1",14,"2",2,"4",5]},"title":{"type":"text_general","schema":"ITS-M-----------","index":"ITS-------------","docs":4,"distinct":4,"topTerms":["change.me1",1,"change.me112",1,"change.me12",1,"change.me",1],"histogram":["1",4]}},"info":{"key":{"I":"Indexed","T":"Tokenized","S":"Stored","D":"DocValues","M":"Multivalued","V":"TermVector Stored","o":"Store Offset With TermVector","p":"Store Position With TermVector","O":"Omit Norms","F":"Omit Term Frequencies & Positions","P":"Omit Positions","H":"Store Offsets with Positions","L":"Lazy","B":"Binary","f":"Sort Missing 
First","l":"Sort Missing Last"},"NOTE":"Document Frequency (df) is not updated when a document is marked for deletion. df values include deleted documents."}}""")
# NOTE(review): the expected names below (author, category, ...) look like
# they come from the collection schema fixture rather than from the luke
# response mocked just above — confirm against MockResource's schema mock.
assert_equal(
# Dynamic fields not included for now
[{'isDynamic': False, 'isId': None, 'type': 'string', 'name': '<script>alert(1234)</script>'},
{'isDynamic': False, 'isId': None, 'type': 'long', 'name': '_version_'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'author'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'category'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'comments'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'content'},
{'isDynamic': False, 'isId': None, 'type': 'string', 'name': 'content_type'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'description'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'features'},
{'isDynamic': False, 'isId': None, 'type': 'boolean', 'name': 'inStock'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'includes'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'keywords'},
{'isDynamic': False, 'isId': None, 'type': 'date', 'name': 'last_modified'},
{'isDynamic': False, 'isId': None, 'type': 'string', 'name': 'links'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'manu'},
{'isDynamic': False, 'isId': None, 'type': 'string', 'name': 'manu_exact'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'name'},
{'isDynamic': False, 'isId': None, 'type': 'payloads', 'name': 'payloads'},
{'isDynamic': False, 'isId': None, 'type': 'int', 'name': 'popularity'},
{'isDynamic': False, 'isId': None, 'type': 'float', 'name': 'price'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'resourcename'},
{'isDynamic': False, 'isId': None, 'type': 'text_en_splitting_tight', 'name': 'sku'},
{'isDynamic': False, 'isId': None, 'type': 'location', 'name': 'store'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'subject'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'text'},
{'isDynamic': False, 'isId': None, 'type': 'text_general_rev', 'name': 'text_rev'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'title'},
{'isDynamic': False, 'isId': None, 'type': 'text_general', 'name': 'url'},
{'isDynamic': False, 'isId': None, 'type': 'float', 'name': 'weight'},
{'isDynamic': False, 'isId': True, 'type': 'string', 'name': 'id'}],
self.collection.fields_data(self.user, 'collection_1')
)
# TODO
# test facet with userlocation: türkiye, 東京, new york
def test_download(self):
# Mocked Solr select response with 4 PubMed-style article documents; the same
# fixture backs the JSON, CSV and XLSX export checks below.
MockResource.set_solr_response("""{"responseHeader":{"status":0,"QTime":59,"params":{"facet":"true","facet.mincount":"1","facet.limit":"100","facet.date":"article_date","f.article_date.facet.date.start":"NOW-7MONTH/DAYS","wt":"json","rows":"15","user.name":"hue","start":"0","facet.sort":"count","q":"*:*","f.article_date.facet.date.end":"NOW-5MONTH","doAs":"romain","f.article_date.facet.date.gap":"+1DAYS","facet.field":["journal_title","author_facet"],"fq":["article_date:[2013-06-13T00:00:00Z TO 2013-06-13T00:00:00Z+1DAYS]","journal_title:\\"in\\""]}},"response":{"numFound":4,"start":0,"maxScore":1.0,"docs":[{"article_title":"Investigations for neonatal seizures.","journal_issn":"1878-0946","article_abstract_text":["Seizures during the neonatal period are always medical emergencies. Apart from the need for rapid anticonvulsive treatment, the underlying condition is often not immediately obvious. In the search for the correct diagnosis, a thorough history, clinical examination, laboratory work-up, neurophysiological and neuroradiological investigations are all essential. A close collaboration between neonatologists, neuropaediatricians, laboratory specialists, neurophysiologists and radiologists facilitates the adequate care of the infant."],"ontologies":["36481|1 "],"article_date":"2013-06-13T00:00:00Z","journal_title":"Seminars in fetal & neonatal medicine","date_created":"2013-08-22T00:00:00Z","journal_country":"Netherlands","journal_iso_abbreviation":"Semin Fetal Neonatal Med","id":"23680099","author":["B B Hallberg","M M Blennow"],"article_pagination":"196-201","journal_publication_date":"2013-08-22T00:00:00Z","affiliation":"Department of Neonatology, Karolinska Institutet and University Hospital, Stockholm, Sweden. 
boubou.hallberg@ki.se","language":"eng","_version_":1450807641462800385},{"article_title":"Enantiomeric selection properties of β-homoDNA: enhanced pairing for heterochiral complexes.","journal_issn":"1521-3773","article_date":"2013-06-13T00:00:00Z","journal_title":"Angewandte Chemie (International ed. in English)","date_created":"2013-07-20T00:00:00Z","journal_country":"Germany","journal_iso_abbreviation":"Angew. Chem. Int. Ed. Engl.","id":"23670912","author":["Daniele D D'Alonzo","Jussara J Amato","Guy G Schepers","Matheus M Froeyen","Arthur A Van Aerschot","Piet P Herdewijn","Annalisa A Guaragna"],"article_pagination":"6662-5","journal_publication_date":"2013-06-24T00:00:00Z","affiliation":"Dipartimento di Scienze Chimiche, Università degli Studi di Napoli Federico II, Via Cintia 21, 80126 Napoli, Italy. dandalonzo@unina.it","language":"eng","_version_":1450807661929955329},{"article_title":"Interference of bacterial cell-to-cell communication: a new concept of antimicrobial chemotherapy breaks antibiotic resistance.","journal_issn":"1664-302X","article_abstract_text":["Bacteria use a cell-to-cell communication activity termed \\"quorum sensing\\" to coordinate group behaviors in a cell density dependent manner. Quorum sensing influences the expression profile of diverse genes, including antibiotic tolerance and virulence determinants, via specific chemical compounds called \\"autoinducers\\". During quorum sensing, Gram-negative bacteria typically use an acylated homoserine lactone (AHL) called autoinducer 1. Since the first discovery of quorum sensing in a marine bacterium, it has been recognized that more than 100 species possess this mechanism of cell-to-cell communication. In addition to being of interest from a biological standpoint, quorum sensing is a potential target for antimicrobial chemotherapy. This unique concept of antimicrobial control relies on reducing the burden of virulence rather than killing the bacteria. 
It is believed that this approach will not only suppress the development of antibiotic resistance, but will also improve the treatment of refractory infections triggered by multi-drug resistant pathogens. In this paper, we review and track recent progress in studies on AHL inhibitors/modulators from a biological standpoint. It has been discovered that both natural and synthetic compounds can disrupt quorum sensing by a variety of means, such as jamming signal transduction, inhibition of signal production and break-down and trapping of signal compounds. We also focus on the regulatory elements that attenuate quorum sensing activities and discuss their unique properties. Understanding the biological roles of regulatory elements might be useful in developing inhibitor applications and understanding how quorum sensing is controlled."],"ontologies":["2402|1 ","1875|1 ","2047|3 ","36690|1 ","8120|1 ","1872|1 ","1861|1 ","1955|2 ","38027|1 ","3853|1 ","2237|3 ","37074|1 ","3043|2 ","36478|1 ","4403|1 ","2751|1 ","10751|1 ","36467|1 ","2387|1 ","7278|3 ","3826|1 "],"article_date":"2013-06-13T00:00:00Z","journal_title":"Frontiers in microbiology","date_created":"2013-06-30T00:00:00Z","journal_country":"Switzerland","journal_iso_abbreviation":"Front Microbiol","id":"23720655","author":["Hidetada H Hirakawa","Haruyoshi H Tomita"],"article_pagination":"114","journal_publication_date":"2013-09-13T00:00:00Z","affiliation":"Advanced Scientific Research Leaders Development Unit, Gunma University Maebashi, Gunma, Japan.","language":"eng","_version_":1450807662055784448},{"article_title":"The role of musical training in emergent and event-based timing.","journal_issn":"1662-5161","article_abstract_text":["Introduction: Musical performance is thought to rely predominantly on event-based timing involving a clock-like neural process and an explicit internal representation of the time interval. 
Some aspects of musical performance may rely on emergent timing, which is established through the optimization of movement kinematics, and can be maintained without reference to any explicit representation of the time interval. We predicted that musical training would have its largest effect on event-based timing, supporting the dissociability of these timing processes and the dominance of event-based timing in musical performance. Materials and Methods: We compared 22 musicians and 17 non-musicians on the prototypical event-based timing task of finger tapping and on the typically emergently timed task of circle drawing. For each task, participants first responded in synchrony with a metronome (Paced) and then responded at the same rate without the metronome (Unpaced). Results: Analyses of the Unpaced phase revealed that non-musicians were more variable in their inter-response intervals for finger tapping compared to circle drawing. Musicians did not differ between the two tasks. Between groups, non-musicians were more variable than musicians for tapping but not for drawing. We were able to show that the differences were due to less timer variability in musicians on the tapping task. Correlational analyses of movement jerk and inter-response interval variability revealed a negative association for tapping and a positive association for drawing in non-musicians only. Discussion: These results suggest that musical training affects temporal variability in tapping but not drawing. Additionally, musicians and non-musicians may be employing different movement strategies to maintain accurate timing in the two tasks. 
These findings add to our understanding of how musical training affects timing and support the dissociability of event-based and emergent timing modes."],"ontologies":["36810|1 ","49002|1 ","3132|1 ","3797|1 ","37953|1 ","36563|2 ","524|1 ","3781|1 ","2848|1 ","17163|1 ","17165|1 ","49010|1 ","36647|3 ","36529|1 ","2936|1 ","2643|1 ","714|1 ","3591|1 ","2272|1 ","3103|1 ","2265|1 ","37051|1 ","3691|1 "],"article_date":"2013-06-14T00:00:00Z","journal_title":"Frontiers in human neuroscience","date_created":"2013-06-29T00:00:00Z","journal_country":"Switzerland","journal_iso_abbreviation":"Front Hum Neurosci","id":"23717275","author":["L H LH Baer","J L N JL Thibodeau","T M TM Gralnick","K Z H KZ Li","V B VB Penhune"],"article_pagination":"191","journal_publication_date":"2013-09-13T00:00:00Z","affiliation":"Department of Psychology, Centre for Research in Human Development, Concordia University Montréal, QC, Canada.","language":"eng","_version_":1450807667479019520}]},"facet_counts":{"facet_queries":{},"facet_fields":{"journal_title":["in",4,"frontiers",2,"angewandte",1,"chemie",1,"ed",1,"english",1,"fetal",1,"human",1,"international",1,"medicine",1,"microbiology",1,"neonatal",1,"neuroscience",1,"seminars",1],"author_facet":["Annalisa A Guaragna",1,"Arthur A Van Aerschot",1,"B B Hallberg",1,"Daniele D D'Alonzo",1,"Guy G Schepers",1,"Haruyoshi H Tomita",1,"Hidetada H Hirakawa",1,"J L N JL Thibodeau",1,"Jussara J Amato",1,"K Z H KZ Li",1,"L H LH Baer",1,"M M Blennow",1,"Matheus M Froeyen",1,"Piet P Herdewijn",1,"T M TM Gralnick",1,"V B VB Penhune",1]},"facet_dates":{"article_date":{"gap":"+1DAYS","start":"2013-04-27T00:00:00Z","end":"2013-06-28T00:00:00Z"}},"facet_ranges":{}},"highlighting":{"23680099":{},"23670912":{},"23720655":{},"23717275":{}},"spellcheck":{"suggestions":["correctlySpelled",false]}}""")
# JSON export: content type, attachment filename, and the 4 docs serialized
# as a JSON list in response order.
json_response = self.c.post(reverse('dashboard:download'), {
'type': 'json',
'collection': json.dumps(self._get_collection_param(self.collection)),
'query': json.dumps(QUERY)
})
json_response_content = json.loads(json_response.content)
assert_equal('application/json', json_response['Content-Type'])
assert_equal('attachment; filename="query_result.json"', json_response['Content-Disposition'])
assert_equal(4, len(json_response_content), len(json_response_content))
assert_equal('Investigations for neonatal seizures.', json_response_content[0]['article_title'])
# CSV export: streamed content with a header row built from the collection
# schema field names (including the XSS-looking '<script>...' field).
csv_response = self.c.post(reverse('dashboard:download'), {
'type': 'csv',
'collection': json.dumps(self._get_collection_param(self.collection)),
'query': json.dumps(QUERY)
})
csv_response_content = ''.join(csv_response.streaming_content)
assert_equal('application/csv', csv_response['Content-Type'])
assert_equal('attachment; filename="query_result.csv"', csv_response['Content-Disposition'])
# Header row + 4 data rows + trailing newline.
assert_equal(4 + 1 + 1, len(csv_response_content.split('\n')), csv_response_content.split('\n'))
assert_true('<script>alert(1234)</script>,_version_,author,category,comments,content,content_type,description,features,inStock,includes,keywords,last_modified,links,manu,manu_exact,name,payloads,popularity,price,resourcename,sku,store,subject,text,text_rev,title,url,weight,id' in csv_response_content, csv_response_content)
# Fields does not exactly match the response but this is because the collection schema does not match the query response.
assert_true(""",1450807641462800385,"['B B Hallberg', 'M M Blennow']",,,,,,,,,,,,,,,,,,,,,,,,,,,23680099""" in csv_response_content, csv_response_content)
# XLSX export: only check that a non-empty workbook with the right headers
# comes back (binary content is not inspected further).
xls_response = self.c.post(reverse('dashboard:download'), {
'type': 'xls',
'collection': json.dumps(self._get_collection_param(self.collection)),
'query': json.dumps(QUERY)
})
xls_response_content = ''.join(xls_response.content)
assert_not_equal(0, len(xls_response_content))
assert_equal('application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', xls_response['Content-Type'])
assert_equal('attachment; filename="query_result.xlsx"', xls_response['Content-Disposition'])
def test_index_xss(self):
    # A "q" parameter carrying "</script><script>alert('XSS')</script>" tries
    # to break out of the page's embedded JSON; the rendered query context
    # must keep only the harmless inner text, with the script markup gone.
    dashboard_doc = Document2.objects.create(
        name='test_dashboard',
        type='search-dashboard',
        owner=self.user,
        data=json.dumps(self.collection.data),
        parent_directory=self.home_dir
    )
    try:
        url = reverse('dashboard:index') + ('?collection=%s' % dashboard_doc.id) + '&q=</script><script>alert(%27XSS%27)</script>'
        response = self.c.get(url)
        expected_query = '{"fqs": [], "qs": [{"q": "alert(\'XSS\')"}], "start": 0}'
        assert_equal(expected_query, response.context[0]['query'])
    finally:
        # Always clean up the temporary dashboard document.
        dashboard_doc.delete()
def test_augment_response(self):
    """augment_response() must not raise when a doc carries a JSON-encoded 'link-meta' field.

    Regression check: this input previously triggered
    "Expecting property name: line 1 column 1 (char 1)".
    """
    collection_params = self._get_collection_param(self.collection)
    solr_result = {
        'response': {
            'docs': [
                {
                    'id': 111,
                    'link-meta': '{"type": "hdfs", "path": "/user/hue/pdf/sql_editor.pdf"}',
                },
            ]
        }
    }
    # Succeeds as long as no exception escapes.
    augment_response(collection_params, QUERY, solr_result)
SOLR_LUKE_SCHEMA = """{"responseHeader":{"status":0,"QTime":2},"index":{"numDocs":8,"maxDoc":8,"deletedDocs":0,"version":15,"segmentCount":5,"current":true,"hasDeletions":false,"directory":"org.apache.lucene.store.NRTCachingDirectory:NRTCachingDirectory(org.apache.solr.store.hdfs.HdfsDirectory@5efe087b lockFactory=org.apache.solr.store.hdfs.HdfsLockFactory@5106def2; maxCacheMB=192.0 maxMergeSizeMB=16.0)","userData":{"commitTimeMSec":"1389233070579"},"lastModified":"2014-01-09T02:04:30.579Z"},"schema":{"fields":{"_version_":{"type":"long","flags":"ITS-----OF------","copyDests":[],"copySources":[]},"author":{"type":"text_general","flags":"ITS-------------","positionIncrementGap":100,"copyDests":["author_s","text"],"copySources":[]},"<script>alert(1234)</script>":{"type":"string","flags":"I-S-M---OF-----l","copyDests":["text"],"copySources":[]},"category":{"type":"text_general","flags":"ITS-------------","positionIncrementGap":100,"copyDests":[],"copySources":[]},"comments":{"type":"text_general","flags":"ITS-------------","positionIncrementGap":100,"copyDests":[],"copySources":[]},"content":{"type":"text_general","flags":"-TS-M-----------","positionIncrementGap":100,"copyDests":["text"],"copySources":[]},"content_type":{"type":"string","flags":"I-S-M---OF-----l","copyDests":["text"],"copySources":[]},"description":{"type":"text_general","flags":"ITS-------------","positionIncrementGap":100,"copyDests":["text"],"copySources":[]},"features":{"type":"text_general","flags":"ITS-M-----------","positionIncrementGap":100,"copyDests":["text"],"copySources":[]},"id":{"type":"string","flags":"I-S-----OF-----l","required":true,"uniqueKey":true,"copyDests":[],"copySources":[]},"inStock":{"type":"boolean","flags":"I-S-----OF-----l","copyDests":[],"copySources":[]},"includes":{"type":"text_general","flags":"ITS--Vop--------","positionIncrementGap":100,"copyDests":["text"],"copySources":[]},"keywords":{"type":"text_general","flags":"ITS-------------","positionIncrementGap":100,"copy
Dests":["text"],"copySources":[]},"last_modified":{"type":"date","flags":"ITS------F------","copyDests":[],"copySources":[]},"links":{"type":"string","flags":"I-S-M---OF-----l","copyDests":[],"copySources":[]},"manu":{"type":"text_general","flags":"ITS-----O-------","positionIncrementGap":100,"copyDests":["text","manu_exact"],"copySources":[]},"manu_exact":{"type":"string","flags":"I-------OF-----l","copyDests":[],"copySources":["manu"]},"name":{"type":"text_general","flags":"ITS-------------","positionIncrementGap":100,"copyDests":["text"],"copySources":[]},"payloads":{"type":"payloads","flags":"ITS-------------","copyDests":[],"copySources":[]},"popularity":{"type":"int","flags":"ITS-----OF------","copyDests":[],"copySources":[]},"price":{"type":"float","flags":"ITS-----OF------","copyDests":["price_c"],"copySources":[]},"resourcename":{"type":"text_general","flags":"ITS-------------","positionIncrementGap":100,"copyDests":["text"],"copySources":[]},"sku":{"type":"text_en_splitting_tight","flags":"ITS-----O-------","positionIncrementGap":100,"copyDests":[],"copySources":[]},"store":{"type":"location","flags":"I-S------F------","copyDests":[],"copySources":[]},"subject":{"type":"text_general","flags":"ITS-------------","positionIncrementGap":100,"copyDests":[],"copySources":[]},"text":{"type":"text_general","flags":"IT--M-----------","positionIncrementGap":100,"copyDests":[],"copySources":["cat","keywords","resourcename","includes","url","content","author","title","manu","description","name","features","content_type"]},"text_rev":{"type":"text_general_rev","flags":"IT--M-----------","positionIncrementGap":100,"copyDests":[],"copySources":[]},"title":{"type":"text_general","flags":"ITS-M-----------","positionIncrementGap":100,"copyDests":["text"],"copySources":[]},"url":{"type":"text_general","flags":"ITS-------------","positionIncrementGap":100,"copyDests":["text"],"copySources":[]},"weight":{"type":"float","flags":"ITS-----OF------","copyDests":[],"copySources":[]
}},"dynamicFields":{"*____amount_raw":{"type":"amount_raw_type_tlong","flags":"IT------O-------","copyDests":[],"copySources":[]},"*____currency":{"type":"currency_type_string","flags":"I-------O-------","copyDests":[],"copySources":[]},"*_b":{"type":"boolean","flags":"I-S-----OF-----l","copyDests":[],"copySources":[]},"*_bs":{"type":"boolean","flags":"I-S-M---OF-----l","copyDests":[],"copySources":[]},"*_c":{"type":"currency","flags":"I-S------F------","copyDests":[],"copySources":[]},"*_coordinate":{"type":"tdouble","flags":"IT------OF------","copyDests":[],"copySources":[]},"*_d":{"type":"double","flags":"ITS-----OF------","copyDests":[],"copySources":[]},"*_ds":{"type":"double","flags":"ITS-M---OF------","copyDests":[],"copySources":[]},"*_dt":{"type":"date","flags":"ITS------F------","copyDests":[],"copySources":[]},"*_dts":{"type":"date","flags":"ITS-M----F------","copyDests":[],"copySources":[]},"*_en":{"type":"text_en","flags":"ITS-M-----------","positionIncrementGap":100,"copyDests":[],"copySources":[]},"*_f":{"type":"float","flags":"ITS-----OF------","copyDests":[],"copySources":[]},"*_fs":{"type":"float","flags":"ITS-M---OF------","copyDests":[],"copySources":[]},"*_i":{"type":"int","flags":"ITS-----OF------","copyDests":[],"copySources":[]},"*_is":{"type":"int","flags":"ITS-M---OF------","copyDests":[],"copySources":[]},"*_l":{"type":"long","flags":"ITS-----OF------","copyDests":[],"copySources":[]},"*_ls":{"type":"long","flags":"ITS-M---OF------","copyDests":[],"copySources":[]},"*_p":{"type":"location","flags":"I-S------F------","copyDests":[],"copySources":[]},"*_pi":{"type":"pint","flags":"I-S-----OF------","copyDests":[],"copySources":[]},"*_s":{"type":"string","flags":"I-S-----OF-----l","copyDests":[],"copySources":[]},"*_ss":{"type":"string","flags":"I-S-M---OF-----l","copyDests":[],"copySources":[]},"*_t":{"type":"text_general","flags":"ITS-------------","positionIncrementGap":100,"copyDests":[],"copySources":[]},"*_td":{"type":"tdouble","flags":
"ITS-----OF------","copyDests":[],"copySources":[]},"*_tdt":{"type":"tdate","flags":"ITS------F------","copyDests":[],"copySources":[]},"*_tf":{"type":"tfloat","flags":"ITS-----OF------","copyDests":[],"copySources":[]},"*_ti":{"type":"tint","flags":"ITS-----OF------","copyDests":[],"copySources":[]},"*_tl":{"type":"tlong","flags":"ITS-----OF------","copyDests":[],"copySources":[]},"*_txt":{"type":"text_general","flags":"ITS-M-----------","positionIncrementGap":100,"copyDests":[],"copySources":[]},"attr_*":{"type":"text_general","flags":"ITS-M-----------","positionIncrementGap":100,"copyDests":[],"copySources":[]},"ignored_*":{"type":"ignored","flags":"----M---OF------","copyDests":[],"copySources":[]},"random_*":{"type":"random","flags":"I-S------F------","copyDests":[],"copySources":[]}},"uniqueKeyField":"id","defaultSearchField":null,"types":{"alphaOnlySort":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.KeywordTokenizerFactory","args":{"class":"solr.KeywordTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"TrimFilterFactory":{"args":{"class":"solr.TrimFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.miscellaneous.TrimFilterFactory"},"PatternReplaceFilterFactory":{"args":{"replace":"all","replacement":"","pattern":"([^a-z])","class":"solr.PatternReplaceFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.pattern.PatternReplaceFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.KeywordTokenizerFactory","args":{"class":"solr.KeywordTokenizerFactory","lu
ceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"TrimFilterFactory":{"args":{"class":"solr.TrimFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.miscellaneous.TrimFilterFactory"},"PatternReplaceFilterFactory":{"args":{"replace":"all","replacement":"","pattern":"([^a-z])","class":"solr.PatternReplaceFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.pattern.PatternReplaceFilterFactory"}}},"similarity":{}},"ancestor_path":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.KeywordTokenizerFactory","args":{"class":"solr.KeywordTokenizerFactory","luceneMatchVersion":"LUCENE_44"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.path.PathHierarchyTokenizerFactory","args":{"delimiter":"/","class":"solr.PathHierarchyTokenizerFactory","luceneMatchVersion":"LUCENE_44"}}},"similarity":{}},"binary":{"fields":null,"tokenized":false,"className":"org.apache.solr.schema.BinaryField","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"boolean":{"fields":["inStock","*_bs","*_b"],"tokenized":false,"className":"org.apache.solr.schema.BoolField","indexAnalyzer":{"className":"org.apache.solr.schema.BoolField$1"},"queryAnalyzer":{"className":"org.apache.solr.schema.BoolField$1"},"similarity":{}},"currency":{"fields":["*_c"],"tokenized":false,"className":"org.apache.solr.schema.CurrencyField","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"cl
assName":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"date":{"fields":["last_modified","*_dts","*_dt"],"tokenized":true,"className":"org.apache.solr.schema.TrieDateField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"similarity":{}},"descendent_path":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.path.PathHierarchyTokenizerFactory","args":{"delimiter":"/","class":"solr.PathHierarchyTokenizerFactory","luceneMatchVersion":"LUCENE_44"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.KeywordTokenizerFactory","args":{"class":"solr.KeywordTokenizerFactory","luceneMatchVersion":"LUCENE_44"}}},"similarity":{}},"double":{"fields":["*_ds","*_d"],"tokenized":true,"className":"org.apache.solr.schema.TrieDoubleField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"similarity":{}},"float":{"fields":["weight","price","*_fs","*_f"],"tokenized":true,"className":"org.apache.solr.schema.TrieFloatField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":
{}}},"similarity":{}},"ignored":{"fields":["ignored_*"],"tokenized":false,"className":"org.apache.solr.schema.StrField","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"int":{"fields":["popularity","*_is","*_i"],"tokenized":true,"className":"org.apache.solr.schema.TrieIntField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"similarity":{}},"location":{"fields":["store","*_p"],"tokenized":false,"className":"org.apache.solr.schema.LatLonType","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"location_rpt":{"fields":null,"tokenized":false,"className":"org.apache.solr.schema.SpatialRecursivePrefixTreeFieldType","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"long":{"fields":["_version_","*_ls","*_l"],"tokenized":true,"className":"org.apache.solr.schema.TrieLongField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"similarity":{}},"lowercase":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.KeywordTokenizerFa
ctory","args":{"class":"solr.KeywordTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.KeywordTokenizerFactory","args":{"class":"solr.KeywordTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"}}},"similarity":{}},"payloads":{"fields":["payloads"],"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.WhitespaceTokenizerFactory","args":{"class":"solr.WhitespaceTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"DelimitedPayloadTokenFilterFactory":{"args":{"class":"solr.DelimitedPayloadTokenFilterFactory","luceneMatchVersion":"LUCENE_44","encoder":"float"},"className":"org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.WhitespaceTokenizerFactory","args":{"class":"solr.WhitespaceTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"DelimitedPayloadTokenFilterFactory":{"args":{"class":"solr.DelimitedPayloadTokenFilterFactory","luceneMatchVersion":"LUCENE_44","encoder":"float"},"className":"org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilterFactory"}}},"similarity":{}},"pdate":{"fields":null,"tokenized":false,"className":"org.apache.solr.schema.DateField","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"
className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"pdouble":{"fields":null,"tokenized":false,"className":"org.apache.solr.schema.DoubleField","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"pfloat":{"fields":null,"tokenized":false,"className":"org.apache.solr.schema.FloatField","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"phonetic":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"DoubleMetaphoneFilterFactory":{"args":{"inject":"false","class":"solr.DoubleMetaphoneFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.phonetic.DoubleMetaphoneFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"DoubleMetaphoneFilterFactory":{"args":{"inject":"false","class":"solr.DoubleMetaphoneFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.phonetic.DoubleMetaphoneFilterFactory"}}},"similarity":{}},"pint":{"fields":["*_pi"],"tokenized":false,"className":"org.apache.solr.schema.IntField","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"plong":{"fields":null,"tokenized":false,"className":"org.apache
.solr.schema.LongField","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"point":{"fields":null,"tokenized":false,"className":"org.apache.solr.schema.PointType","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"random":{"fields":["random_*"],"tokenized":false,"className":"org.apache.solr.schema.RandomSortField","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"string":{"fields":["cat","id","manu_exact","content_type","links","*_ss","*_s"],"tokenized":false,"className":"org.apache.solr.schema.StrField","indexAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"queryAnalyzer":{"className":"org.apache.solr.schema.FieldType$DefaultAnalyzer"},"similarity":{}},"tdate":{"fields":["*_tdt"],"tokenized":true,"className":"org.apache.solr.schema.TrieDateField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"similarity":{}},"tdouble":{"fields":["*_coordinate","*_td"],"tokenized":true,"className":"org.apache.solr.schema.TrieDoubleField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"similarity":{}},"text_ar":{"fields":null,"tokenized":true,"className":"o
rg.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_ar.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"ArabicNormalizationFilterFactory":{"args":{"class":"solr.ArabicNormalizationFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ar.ArabicNormalizationFilterFactory"},"ArabicStemFilterFactory":{"args":{"class":"solr.ArabicStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ar.ArabicStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_ar.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"ArabicNormalizationFilterFactory":{"args":{"class":"solr.ArabicNormalizationFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ar.ArabicNormalizationFilterFactory"},"ArabicStemFilterFactory":{"args":{"class":"solr.ArabicStemFilterFactory","luceneMatchVersion":"
LUCENE_44"},"className":"org.apache.lucene.analysis.ar.ArabicStemFilterFactory"}}},"similarity":{}},"text_bg":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_bg.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"BulgarianStemFilterFactory":{"args":{"class":"solr.BulgarianStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.bg.BulgarianStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_bg.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"BulgarianStemFilterFactory":{"args":{"class":"solr.BulgarianStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.bg.BulgarianStemFilterFactory"}}},"similarity":{}},"text_ca":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr
.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"ElisionFilterFactory":{"args":{"articles":"lang/contractions_ca.txt","class":"solr.ElisionFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.util.ElisionFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_ca.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Catalan","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"ElisionFilterFactory":{"args":{"articles":"lang/contractions_ca.txt","class":"solr.ElisionFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.util.ElisionFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_ca.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","lang
uage":"Catalan","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_cjk":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"CJKWidthFilterFactory":{"args":{"class":"solr.CJKWidthFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.cjk.CJKWidthFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"CJKBigramFilterFactory":{"args":{"class":"solr.CJKBigramFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.cjk.CJKBigramFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"CJKWidthFilterFactory":{"args":{"class":"solr.CJKWidthFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.cjk.CJKWidthFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"CJKBigramFilterFactory":{"args":{"class":"solr.CJKBigramFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.cjk.CJKBigramFilterFactory"}}},"similarity":{}},"text_cz":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org
.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_cz.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"CzechStemFilterFactory":{"args":{"class":"solr.CzechStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.cz.CzechStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_cz.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"CzechStemFilterFactory":{"args":{"class":"solr.CzechStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.cz.CzechStemFilterFactory"}}},"similarity":{}},"text_da":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"}
,"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_da.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Danish","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_da.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Danish","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_de":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_de.txt","cla
ss":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"GermanNormalizationFilterFactory":{"args":{"class":"solr.GermanNormalizationFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.de.GermanNormalizationFilterFactory"},"GermanLightStemFilterFactory":{"args":{"class":"solr.GermanLightStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.de.GermanLightStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_de.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"GermanNormalizationFilterFactory":{"args":{"class":"solr.GermanNormalizationFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.de.GermanNormalizationFilterFactory"},"GermanLightStemFilterFactory":{"args":{"class":"solr.GermanLightStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.de.GermanLightStemFilterFactory"}}},"similarity":{}},"text_el":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"GreekLowerCase
FilterFactory":{"args":{"class":"solr.GreekLowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.el.GreekLowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_el.txt","class":"solr.StopFilterFactory","ignoreCase":"false","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"GreekStemFilterFactory":{"args":{"class":"solr.GreekStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.el.GreekStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"GreekLowerCaseFilterFactory":{"args":{"class":"solr.GreekLowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.el.GreekLowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_el.txt","class":"solr.StopFilterFactory","ignoreCase":"false","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"GreekStemFilterFactory":{"args":{"class":"solr.GreekStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.el.GreekStemFilterFactory"}}},"similarity":{}},"text_en":{"fields":["*_en"],"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"StopFilterFactory":{"args":{"words":"lang/stopwords_en.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"LowerCaseFilterFactory":{"a
rgs":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"EnglishPossessiveFilterFactory":{"args":{"class":"solr.EnglishPossessiveFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.en.EnglishPossessiveFilterFactory"},"KeywordMarkerFilterFactory":{"args":{"protected":"protwords.txt","class":"solr.KeywordMarkerFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.miscellaneous.KeywordMarkerFilterFactory"},"PorterStemFilterFactory":{"args":{"class":"solr.PorterStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.en.PorterStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"SynonymFilterFactory":{"args":{"class":"solr.SynonymFilterFactory","expand":"true","synonyms":"synonyms.txt","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.synonym.SynonymFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_en.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"EnglishPossessiveFilterFactory":{"args":{"class":"solr.EnglishPossessiveFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.en.EnglishPossessiveFilterFactory"},"KeywordMarkerFilterFactory":{"args":{"protected":"protwords.txt","class":"solr.KeywordMarkerFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.an
alysis.miscellaneous.KeywordMarkerFilterFactory"},"PorterStemFilterFactory":{"args":{"class":"solr.PorterStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.en.PorterStemFilterFactory"}}},"similarity":{}},"text_en_splitting":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.WhitespaceTokenizerFactory","args":{"class":"solr.WhitespaceTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"StopFilterFactory":{"args":{"words":"lang/stopwords_en.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"WordDelimiterFilterFactory":{"args":{"generateNumberParts":"1","splitOnCaseChange":"1","catenateWords":"1","class":"solr.WordDelimiterFilterFactory","generateWordParts":"1","luceneMatchVersion":"LUCENE_44","catenateAll":"0","catenateNumbers":"1"},"className":"org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"KeywordMarkerFilterFactory":{"args":{"protected":"protwords.txt","class":"solr.KeywordMarkerFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.miscellaneous.KeywordMarkerFilterFactory"},"PorterStemFilterFactory":{"args":{"class":"solr.PorterStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.en.PorterStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.WhitespaceTokenizerFactory","args":{"class":"solr.WhitespaceTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"SynonymFilterFactory":{"
args":{"class":"solr.SynonymFilterFactory","expand":"true","synonyms":"synonyms.txt","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.synonym.SynonymFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_en.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"WordDelimiterFilterFactory":{"args":{"generateNumberParts":"1","splitOnCaseChange":"1","catenateWords":"0","class":"solr.WordDelimiterFilterFactory","generateWordParts":"1","luceneMatchVersion":"LUCENE_44","catenateAll":"0","catenateNumbers":"0"},"className":"org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"KeywordMarkerFilterFactory":{"args":{"protected":"protwords.txt","class":"solr.KeywordMarkerFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.miscellaneous.KeywordMarkerFilterFactory"},"PorterStemFilterFactory":{"args":{"class":"solr.PorterStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.en.PorterStemFilterFactory"}}},"similarity":{}},"text_en_splitting_tight":{"fields":["sku"],"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.WhitespaceTokenizerFactory","args":{"class":"solr.WhitespaceTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"SynonymFilterFactory":{"args":{"class":"solr.SynonymFilterFactory","expand":"false","synonyms":"synonyms.txt","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.synonym.SynonymFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords
_en.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"WordDelimiterFilterFactory":{"args":{"generateNumberParts":"0","catenateWords":"1","class":"solr.WordDelimiterFilterFactory","generateWordParts":"0","luceneMatchVersion":"LUCENE_44","catenateAll":"0","catenateNumbers":"1"},"className":"org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"KeywordMarkerFilterFactory":{"args":{"protected":"protwords.txt","class":"solr.KeywordMarkerFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.miscellaneous.KeywordMarkerFilterFactory"},"EnglishMinimalStemFilterFactory":{"args":{"class":"solr.EnglishMinimalStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.en.EnglishMinimalStemFilterFactory"},"RemoveDuplicatesTokenFilterFactory":{"args":{"class":"solr.RemoveDuplicatesTokenFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.WhitespaceTokenizerFactory","args":{"class":"solr.WhitespaceTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"SynonymFilterFactory":{"args":{"class":"solr.SynonymFilterFactory","expand":"false","synonyms":"synonyms.txt","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.synonym.SynonymFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_en.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"Word
DelimiterFilterFactory":{"args":{"generateNumberParts":"0","catenateWords":"1","class":"solr.WordDelimiterFilterFactory","generateWordParts":"0","luceneMatchVersion":"LUCENE_44","catenateAll":"0","catenateNumbers":"1"},"className":"org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"KeywordMarkerFilterFactory":{"args":{"protected":"protwords.txt","class":"solr.KeywordMarkerFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.miscellaneous.KeywordMarkerFilterFactory"},"EnglishMinimalStemFilterFactory":{"args":{"class":"solr.EnglishMinimalStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.en.EnglishMinimalStemFilterFactory"},"RemoveDuplicatesTokenFilterFactory":{"args":{"class":"solr.RemoveDuplicatesTokenFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilterFactory"}}},"similarity":{}},"text_es":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_es.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SpanishLightStemFilterFactory":{"args":{"class":"solr.SpanishLightStemFilterFactory","luceneMatchVersion
":"LUCENE_44"},"className":"org.apache.lucene.analysis.es.SpanishLightStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_es.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SpanishLightStemFilterFactory":{"args":{"class":"solr.SpanishLightStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.es.SpanishLightStemFilterFactory"}}},"similarity":{}},"text_eu":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_eu.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Basque","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{
"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_eu.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Basque","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_fa":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","charFilters":{"PersianCharFilterFactory":{"args":{"class":"solr.PersianCharFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.fa.PersianCharFilterFactory"}},"tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"ArabicNormalizationFilterFactory":{"args":{"class":"solr.ArabicNormalizationFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ar.ArabicNormalizationFilterFactory"},"PersianNormalizationFilterFactory":{"args":{"class":"solr.PersianNormalizationFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.fa.PersianNormalizationFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_fa.txt","class":"solr.StopFilt
erFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","charFilters":{"PersianCharFilterFactory":{"args":{"class":"solr.PersianCharFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.fa.PersianCharFilterFactory"}},"tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"ArabicNormalizationFilterFactory":{"args":{"class":"solr.ArabicNormalizationFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ar.ArabicNormalizationFilterFactory"},"PersianNormalizationFilterFactory":{"args":{"class":"solr.PersianNormalizationFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.fa.PersianNormalizationFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_fa.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"}}},"similarity":{}},"text_fi":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_fi.txt","class":"solr.StopFil
terFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Finnish","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_fi.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Finnish","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_fr":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"ElisionFilterFactory":{"args":{"articles":"lang/contractions_fr.txt","class":"solr.ElisionFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.util.ElisionFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilt
erFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_fr.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"FrenchLightStemFilterFactory":{"args":{"class":"solr.FrenchLightStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.fr.FrenchLightStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"ElisionFilterFactory":{"args":{"articles":"lang/contractions_fr.txt","class":"solr.ElisionFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.util.ElisionFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_fr.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"FrenchLightStemFilterFactory":{"args":{"class":"solr.FrenchLightStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.fr.FrenchLightStemFilterFactory"}}},"similarity":{}},"text_ga":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"ElisionFilterFactory":{"args":{"articles":"lang/contractions_ga.txt","class":"solr.ElisionFilterFactory","ignoreCase"
:"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.util.ElisionFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/hyphenations_ga.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"IrishLowerCaseFilterFactory":{"args":{"class":"solr.IrishLowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ga.IrishLowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_ga.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Irish","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"ElisionFilterFactory":{"args":{"articles":"lang/contractions_ga.txt","class":"solr.ElisionFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.util.ElisionFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/hyphenations_ga.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"IrishLowerCaseFilterFactory":{"args":{"class":"solr.IrishLowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ga.IrishLowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_ga.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apach
e.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Irish","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_general":{"fields":["subject","includes","author","title","description","name","features","text","keywords","resourcename","url","content","category","manu","comments","attr_*","*_txt","*_t"],"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"StopFilterFactory":{"args":{"words":"stopwords.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"StopFilterFactory":{"args":{"words":"stopwords.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SynonymFilterFactory":{"args":{"class":"solr.SynonymFilterFactory","expand":"true","synonyms":"synonyms.txt","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.synonym.SynonymFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.luc
ene.analysis.core.LowerCaseFilterFactory"}}},"similarity":{}},"text_general_rev":{"fields":["text_rev"],"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"StopFilterFactory":{"args":{"words":"stopwords.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"ReversedWildcardFilterFactory":{"args":{"maxFractionAsterisk":"0.33","withOriginal":"true","maxPosQuestion":"2","class":"solr.ReversedWildcardFilterFactory","maxPosAsterisk":"3","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.solr.analysis.ReversedWildcardFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"SynonymFilterFactory":{"args":{"class":"solr.SynonymFilterFactory","expand":"true","synonyms":"synonyms.txt","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.synonym.SynonymFilterFactory"},"StopFilterFactory":{"args":{"words":"stopwords.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"}}},"similarity":{}},"text_gl":{"fields":null,
"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_gl.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"GalicianStemFilterFactory":{"args":{"class":"solr.GalicianStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.gl.GalicianStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_gl.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"GalicianStemFilterFactory":{"args":{"class":"solr.GalicianStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.gl.GalicianStemFilterFactory"}}},"similarity":{}},"text_hi":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"
solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"IndicNormalizationFilterFactory":{"args":{"class":"solr.IndicNormalizationFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.in.IndicNormalizationFilterFactory"},"HindiNormalizationFilterFactory":{"args":{"class":"solr.HindiNormalizationFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.hi.HindiNormalizationFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_hi.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"HindiStemFilterFactory":{"args":{"class":"solr.HindiStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.hi.HindiStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"IndicNormalizationFilterFactory":{"args":{"class":"solr.IndicNormalizationFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.in.IndicNormalizationFilterFactory"},"HindiNormalizationFilterFactory":{"args":{"class":"solr.HindiNormalizationFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.hi.HindiNormalizationFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_hi.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatc
hVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"HindiStemFilterFactory":{"args":{"class":"solr.HindiStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.hi.HindiStemFilterFactory"}}},"similarity":{}},"text_hu":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_hu.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Hungarian","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_hu.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","lan
guage":"Hungarian","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_hy":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_hy.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Armenian","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_hy.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Armenian","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_id":{"fields":n
ull,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_id.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"IndonesianStemFilterFactory":{"args":{"class":"solr.IndonesianStemFilterFactory","stemDerivational":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.id.IndonesianStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_id.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"IndonesianStemFilterFactory":{"args":{"class":"solr.IndonesianStemFilterFactory","stemDerivational":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.id.IndonesianStemFilterFactory"}}},"similarity":{}},"text_it":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.
lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"ElisionFilterFactory":{"args":{"articles":"lang/contractions_it.txt","class":"solr.ElisionFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.util.ElisionFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_it.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"ItalianLightStemFilterFactory":{"args":{"class":"solr.ItalianLightStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.it.ItalianLightStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"ElisionFilterFactory":{"args":{"articles":"lang/contractions_it.txt","class":"solr.ElisionFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.util.ElisionFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_it.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"ItalianLightStemFilterFactory":{"args":{"class":"solr.ItalianLightStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className"
:"org.apache.lucene.analysis.it.ItalianLightStemFilterFactory"}}},"similarity":{}},"text_ja":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.ja.JapaneseTokenizerFactory","args":{"class":"solr.JapaneseTokenizerFactory","luceneMatchVersion":"LUCENE_44","mode":"search"}},"filters":{"JapaneseBaseFormFilterFactory":{"args":{"class":"solr.JapaneseBaseFormFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ja.JapaneseBaseFormFilterFactory"},"JapanesePartOfSpeechStopFilterFactory":{"args":{"tags":"lang/stoptags_ja.txt","class":"solr.JapanesePartOfSpeechStopFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ja.JapanesePartOfSpeechStopFilterFactory"},"CJKWidthFilterFactory":{"args":{"class":"solr.CJKWidthFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.cjk.CJKWidthFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_ja.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"JapaneseKatakanaStemFilterFactory":{"args":{"class":"solr.JapaneseKatakanaStemFilterFactory","minimumLength":"4","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ja.JapaneseKatakanaStemFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.ja.JapaneseTokenizerFactory","args":{"class":"solr.JapaneseTokenizerFactory","luceneMatchVersion":"LUCENE_44","mode":"search"}},"filters":{"JapaneseBaseFormFilterFactory":{"args":{"class":"solr.JapaneseBaseF
ormFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ja.JapaneseBaseFormFilterFactory"},"JapanesePartOfSpeechStopFilterFactory":{"args":{"tags":"lang/stoptags_ja.txt","class":"solr.JapanesePartOfSpeechStopFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ja.JapanesePartOfSpeechStopFilterFactory"},"CJKWidthFilterFactory":{"args":{"class":"solr.CJKWidthFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.cjk.CJKWidthFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_ja.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"JapaneseKatakanaStemFilterFactory":{"args":{"class":"solr.JapaneseKatakanaStemFilterFactory","minimumLength":"4","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.ja.JapaneseKatakanaStemFilterFactory"},"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"}}},"similarity":{}},"text_lv":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_lv.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"LatvianStemFilterFactory":{"args":{"class":"solr.LatvianStemFilterFactory","luceneMat
chVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.lv.LatvianStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_lv.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"LatvianStemFilterFactory":{"args":{"class":"solr.LatvianStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.lv.LatvianStemFilterFactory"}}},"similarity":{}},"text_nl":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_nl.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"StemmerOverrideFilterFactory":{"args":{"class":"solr.StemmerOverrideFilterFactory","dictionary":"lang/stemdict_nl.txt","ignoreCase":"false","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.miscellaneous.StemmerOverrideFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr
.SnowballPorterFilterFactory","language":"Dutch","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_nl.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"StemmerOverrideFilterFactory":{"args":{"class":"solr.StemmerOverrideFilterFactory","dictionary":"lang/stemdict_nl.txt","ignoreCase":"false","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.miscellaneous.StemmerOverrideFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Dutch","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_no":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_no.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_
44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Norwegian","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_no.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Norwegian","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_pt":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_pt.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"PortugueseLightStemFilterFactory":{"args":{"class":"solr.Po
rtugueseLightStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.pt.PortugueseLightStemFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_pt.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"PortugueseLightStemFilterFactory":{"args":{"class":"solr.PortugueseLightStemFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.pt.PortugueseLightStemFilterFactory"}}},"similarity":{}},"text_ro":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_ro.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Romanian","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"c
lassName":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_ro.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Romanian","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_ru":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_ru.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Russian","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"clas
s":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_ru.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Russian","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_sv":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_sv.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Swedish","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.Lower
CaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_sv.txt","class":"solr.StopFilterFactory","format":"snowball","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Swedish","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_th":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"ThaiWordFilterFactory":{"args":{"class":"solr.ThaiWordFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.th.ThaiWordFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_th.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"LowerCaseFilterFactory":{"args":{"class":"solr.LowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.LowerCaseFilterFactory"},"ThaiWordFilterFactory":{"args":{"class":"solr.ThaiWordFilterFactory","lucen
eMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.th.ThaiWordFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_th.txt","class":"solr.StopFilterFactory","ignoreCase":"true","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"}}},"similarity":{}},"text_tr":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"TurkishLowerCaseFilterFactory":{"args":{"class":"solr.TurkishLowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.tr.TurkishLowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_tr.txt","class":"solr.StopFilterFactory","ignoreCase":"false","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"class":"solr.SnowballPorterFilterFactory","language":"Turkish","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.standard.StandardTokenizerFactory","args":{"class":"solr.StandardTokenizerFactory","luceneMatchVersion":"LUCENE_44"}},"filters":{"TurkishLowerCaseFilterFactory":{"args":{"class":"solr.TurkishLowerCaseFilterFactory","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.tr.TurkishLowerCaseFilterFactory"},"StopFilterFactory":{"args":{"words":"lang/stopwords_tr.txt","class":"solr.StopFilterFactory","ignoreCase":"false","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.core.StopFilterFactory"},"SnowballPorterFilterFactory":{"args":{"cl
ass":"solr.SnowballPorterFilterFactory","language":"Turkish","luceneMatchVersion":"LUCENE_44"},"className":"org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory"}}},"similarity":{}},"text_ws":{"fields":null,"tokenized":true,"className":"org.apache.solr.schema.TextField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.WhitespaceTokenizerFactory","args":{"class":"solr.WhitespaceTokenizerFactory","luceneMatchVersion":"LUCENE_44"}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.lucene.analysis.core.WhitespaceTokenizerFactory","args":{"class":"solr.WhitespaceTokenizerFactory","luceneMatchVersion":"LUCENE_44"}}},"similarity":{}},"tfloat":{"fields":["*_tf"],"tokenized":true,"className":"org.apache.solr.schema.TrieFloatField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"similarity":{}},"tint":{"fields":["*_ti"],"tokenized":true,"className":"org.apache.solr.schema.TrieIntField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"similarity":{}},"tlong":{"fields":["*_tl"],"tokenized":true,"className":"org.apache.solr.schema.TrieLongField","indexAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.analysis.TrieTokenizerFactory","args":{}}},"queryAnalyzer":{"className":"org.apache.solr.analysis.TokenizerChain","tokenizer":{"className":"org.apache.solr.ana
lysis.TrieTokenizerFactory","args":{}}},"similarity":{}}}},"info":{"key":{"I":"Indexed","T":"Tokenized","S":"Stored","D":"DocValues","M":"Multivalued","V":"TermVector Stored","o":"Store Offset With TermVector","p":"Store Position With TermVector","O":"Omit Norms","F":"Omit Term Frequencies & Positions","P":"Omit Positions","H":"Store Offsets with Positions","L":"Lazy","B":"Binary","f":"Sort Missing First","l":"Sort Missing Last"},"NOTE":"Document Frequency (df) is not updated when a document is marked for deletion. df values include deleted documents."}}"""
# Canned JSON response from Solr's Luke request handler (/admin/luke) for a small
# example index (8 docs, currency-themed fields such as "cat", "manu", "price_c").
# Includes per-field stats (docs, distinct, topTerms, histogram) plus the standard
# Luke "info.key" legend mapping schema-flag letters (I/T/S/...) to descriptions.
# Presumably used as mock data for tests — confirm against callers.
# NOTE(review): the literal wraps across physical source lines *inside* JSON string
# values (e.g. "Bank of \nAmerica", "Sort Missing \nFirst"), so json.loads() on this
# exact text would fail — verify that consumers strip/normalize newlines first.
SOLR_LUKE_ = """{"responseHeader":{"status":0,"QTime":5},"index":{"numDocs":8,"maxDoc":8,"deletedDocs":0,"version":15,"segmentCount":5,"current":true,"hasDeletions":false,"directory":"org.apache.lucene.store.NRTCachingDirectory:NRTCachingDirectory(org.apache.solr.store.hdfs.HdfsDirectory@5efe087b lockFactory=org.apache.solr.store.hdfs.HdfsLockFactory@5106def2; maxCacheMB=192.0 maxMergeSizeMB=16.0)","userData":{"commitTimeMSec":"1389233070579"},"lastModified":"2014-01-09T02:04:30.579Z"},"fields":{"_version_":{"type":"long","schema":"ITS-----OF------","index":"-TS-------------","docs":8,"distinct":8,"topTerms":["1456716393276768256",1,"1456716398067712000",1,"1456716401465098240",1,"1460689159964327936",1,"1460689159981105152",1,"1460689159988445184",1,"1460689159993688064",1,"1456716273606983680",1],"histogram":["1",8]},"cat":{"type":"string","schema":"I-S-M---OF-----l","index":"ITS-----OF------","docs":4,"distinct":1,"topTerms":["currency",4],"histogram":["1",0,"2",0,"4",1]},"features":{"type":"text_general","schema":"ITS-M-----------","index":"ITS-------------","docs":4,"distinct":3,"topTerms":["coins",4,"notes",4,"and",4],"histogram":["1",0,"2",0,"4",3]},"id":{"type":"string","schema":"I-S-----OF-----l","index":"ITS-----OF------","docs":8,"distinct":8,"topTerms":["GBP",1,"NOK",1,"USD",1,"change.me",1,"change.me1",1,"change.me112",1,"change.me12",1,"EUR",1],"histogram":["1",8]},"inStock":{"type":"boolean","schema":"I-S-----OF-----l","index":"ITS-----OF------","docs":4,"distinct":1,"topTerms":["true",4],"histogram":["1",0,"2",0,"4",1]},"manu":{"type":"text_general","schema":"ITS-----O-------","index":"ITS-----O-------","docs":4,"distinct":7,"topTerms":["of",2,"bank",2,"european",1,"norway",1,"u.k",1,"union",1,"america",1],"histogram":["1",5,"2",2]},"manu_exact":{"type":"string","schema":"I-------OF-----l","index":"(unstored field)","docs":4,"distinct":4,"topTerms":["Bank of Norway",1,"European Union",1,"U.K.",1,"Bank of 
America",1],"histogram":["1",4]},"manu_id_s":{"type":"string","schema":"I-S-----OF-----l","dynamicBase":"*_s","index":"ITS-----OF------","docs":4,"distinct":4,"topTerms":["eu",1,"nor",1,"uk",1,"boa",1],"histogram":["1",4]},"name":{"type":"text_general","schema":"ITS-------------","index":"ITS-------------","docs":4,"distinct":6,"topTerms":["one",4,"euro",1,"krone",1,"dollar",1,"pound",1,"british",1],"histogram":["1",5,"2",0,"4",1]},"price_c":{"type":"currency","schema":"I-S------F------","dynamicBase":"*_c"},"price_c____amount_raw":{"type":"amount_raw_type_tlong","schema":"IT------O-------","dynamicBase":"*____amount_raw","index":"(unstored field)","docs":4,"distinct":8,"topTerms":["0",4,"0",4,"0",4,"0",4,"0",4,"0",4,"0",4,"100",4],"histogram":["1",0,"2",0,"4",8]},"price_c____currency":{"type":"currency_type_string","schema":"I-------O-------","dynamicBase":"*____currency","index":"(unstored field)","docs":4,"distinct":4,"topTerms":["GBP",1,"NOK",1,"USD",1,"EUR",1],"histogram":["1",4]},"romain_t":{"type":"text_general","schema":"ITS-------------","dynamicBase":"*_t","index":"ITS-------------","docs":1,"distinct":1,"topTerms":["true",1],"histogram":["1",1]},"text":{"type":"text_general","schema":"IT--M-----------","index":"(unstored field)","docs":8,"distinct":21,"topTerms":["and",4,"currency",4,"notes",4,"one",4,"coins",4,"bank",2,"of",2,"change.me112",1,"change.me1",1,"change.me",1],"histogram":["1",14,"2",2,"4",5]},"title":{"type":"text_general","schema":"ITS-M-----------","index":"ITS-------------","docs":4,"distinct":4,"topTerms":["change.me1",1,"change.me112",1,"change.me12",1,"change.me",1],"histogram":["1",4]}},"info":{"key":{"I":"Indexed","T":"Tokenized","S":"Stored","D":"DocValues","M":"Multivalued","V":"TermVector Stored","o":"Store Offset With TermVector","p":"Store Position With TermVector","O":"Omit Norms","F":"Omit Term Frequencies & Positions","P":"Omit Positions","H":"Store Offsets with Positions","L":"Lazy","B":"Binary","f":"Sort Missing 
First","l":"Sort Missing Last"},"NOTE":"Document Frequency (df) is not updated when a document is marked for deletion. df values include deleted documents."}}"""
SOLR_SCHEMA = """
<?xml version="1.0" encoding="UTF-8" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!--
This is the Solr schema file. This file should be named "schema.xml" and
should be in the conf directory under the solr home
(i.e. ./solr/conf/schema.xml by default)
or located where the classloader for the Solr webapp can find it.
This example schema is the recommended starting point for users.
It should be kept correct and concise, usable out-of-the-box.
For more information, on how to customize this file, please see
http://wiki.apache.org/solr/SchemaXml
PERFORMANCE NOTE: this schema includes many optional features and should not
be used for benchmarking. To improve performance one could
- set stored="false" for all fields possible (esp large fields) when you
only need to search on the field but don't need to return the original
value.
- set indexed="false" if you don't need to search on the field, but only
return the field as a result of searching on other indexed fields.
- remove all unneeded copyField statements
- for best index size and searching performance, set "index" to false
for all general text fields, use copyField to copy them to the
catchall "text" field, and use that for searching.
- For maximum indexing performance, use the StreamingUpdateSolrServer
java client.
- Remember to run the JVM in server mode, and use a higher logging level
that avoids logging every request
-->
<schema name="example" version="1.5">
<!-- attribute "name" is the name of this schema and is only used for display purposes.
version="x.y" is Solr's version number for the schema syntax and
semantics. It should not normally be changed by applications.
1.0: multiValued attribute did not exist, all fields are multiValued
by nature
1.1: multiValued attribute introduced, false by default
1.2: omitTermFreqAndPositions attribute introduced, true by default
except for text fields.
1.3: removed optional field compress feature
1.4: autoGeneratePhraseQueries attribute introduced to drive QueryParser
behavior when a single string produces multiple tokens. Defaults
to off for version >= 1.4
1.5: omitNorms defaults to true for primitive field types
(int, float, boolean, string...)
-->
<fields>
<!-- Valid attributes for fields:
name: mandatory - the name for the field
type: mandatory - the name of a field type from the
<types> fieldType section
indexed: true if this field should be indexed (searchable or sortable)
stored: true if this field should be retrievable
docValues: true if this field should have doc values. Doc values are
useful for faceting, grouping, sorting and function queries. Although not
required, doc values will make the index faster to load, more
NRT-friendly and more memory-efficient. They however come with some
limitations: they are currently only supported by StrField, UUIDField
and all Trie*Fields, and depending on the field type, they might
require the field to be single-valued, be required or have a default
value (check the documentation of the field type you're interested in
for more information)
multiValued: true if this field may contain multiple values per document
omitNorms: (expert) set to true to omit the norms associated with
this field (this disables length normalization and index-time
boosting for the field, and saves some memory). Only full-text
fields or fields that need an index-time boost need norms.
Norms are omitted for primitive (non-analyzed) types by default.
termVectors: [false] set to true to store the term vector for a
given field.
When using MoreLikeThis, fields used for similarity should be
stored for best performance.
termPositions: Store position information with the term vector.
This will increase storage costs.
termOffsets: Store offset information with the term vector. This
will increase storage costs.
required: The field is required. It will throw an error if the
value does not exist
default: a value that should be used if no value is specified
when adding a document.
-->
<!-- field names should consist of alphanumeric or underscore characters only and
not start with a digit. This is not currently strictly enforced,
but other field names will not have first class support from all components
and back compatibility is not guaranteed. Names with both leading and
trailing underscores (e.g. _version_) are reserved.
-->
<field name="id" type="string" indexed="true" stored="true" required="true" multiValued="false" />
<field name="sku" type="text_en_splitting_tight" indexed="true" stored="true" omitNorms="true"/>
<field name="name" type="text_general" indexed="true" stored="true"/>
<field name="manu" type="text_general" indexed="true" stored="true" omitNorms="true"/>
<field name="cat" type="string" indexed="true" stored="true" multiValued="true"/>
<field name="features" type="text_general" indexed="true" stored="true" multiValued="true"/>
<field name="includes" type="text_general" indexed="true" stored="true" termVectors="true" termPositions="true" termOffsets="true" />
<field name="weight" type="float" indexed="true" stored="true"/>
<field name="price" type="float" indexed="true" stored="true"/>
<field name="popularity" type="int" indexed="true" stored="true" />
<field name="inStock" type="boolean" indexed="true" stored="true" />
<field name="store" type="location" indexed="true" stored="true"/>
<!-- Common metadata fields, named specifically to match up with
SolrCell metadata when parsing rich documents such as Word, PDF.
Some fields are multiValued only because Tika currently may return
multiple values for them. Some metadata is parsed from the documents,
but there are some which come from the client context:
"content_type": From the HTTP headers of incoming stream
"resourcename": From SolrCell request param resource.name
-->
<field name="title" type="text_general" indexed="true" stored="true" multiValued="true"/>
<field name="subject" type="text_general" indexed="true" stored="true"/>
<field name="description" type="text_general" indexed="true" stored="true"/>
<field name="comments" type="text_general" indexed="true" stored="true"/>
<field name="author" type="text_general" indexed="true" stored="true"/>
<field name="keywords" type="text_general" indexed="true" stored="true"/>
<field name="category" type="text_general" indexed="true" stored="true"/>
<field name="resourcename" type="text_general" indexed="true" stored="true"/>
<field name="url" type="text_general" indexed="true" stored="true"/>
<field name="content_type" type="string" indexed="true" stored="true" multiValued="true"/>
<field name="last_modified" type="date" indexed="true" stored="true"/>
<field name="links" type="string" indexed="true" stored="true" multiValued="true"/>
<!-- Main body of document extracted by SolrCell.
NOTE: This field is not indexed by default, since it is also copied to "text"
using copyField below. This is to save space. Use this field for returning and
highlighting document content. Use the "text" field to search the content. -->
<field name="content" type="text_general" indexed="false" stored="true" multiValued="true"/>
<!-- catchall field, containing all other searchable text fields (implemented
via copyField further on in this schema -->
<field name="text" type="text_general" indexed="true" stored="false" multiValued="true"/>
<!-- catchall text field that indexes tokens both normally and in reverse for efficient
leading wildcard queries. -->
<field name="text_rev" type="text_general_rev" indexed="true" stored="false" multiValued="true"/>
<!-- non-tokenized version of manufacturer to make it easier to sort or group
results by manufacturer. copied from "manu" via copyField -->
<field name="manu_exact" type="string" indexed="true" stored="false"/>
<field name="payloads" type="payloads" indexed="true" stored="true"/>
<field name="_version_" type="long" indexed="true" stored="true"/>
<!--
Some fields such as popularity and manu_exact could be modified to
leverage doc values:
<field name="popularity" type="int" indexed="true" stored="true" docValues="true" default="0" />
<field name="manu_exact" type="string" indexed="false" stored="false" docValues="true" default="" />
Although it would make indexing slightly slower and the index bigger, it
would also make the index faster to load, more memory-efficient and more
NRT-friendly.
-->
<!-- Dynamic field definitions allow using convention over configuration
for fields via the specification of patterns to match field names.
EXAMPLE: name="*_i" will match any field ending in _i (like myid_i, z_i)
RESTRICTION: the glob-like pattern in the name attribute must have
a "*" only at the start or the end. -->
<dynamicField name="*_i" type="int" indexed="true" stored="true"/>
<dynamicField name="*_is" type="int" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_s" type="string" indexed="true" stored="true" />
<dynamicField name="*_ss" type="string" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_l" type="long" indexed="true" stored="true"/>
<dynamicField name="*_ls" type="long" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_t" type="text_general" indexed="true" stored="true"/>
<dynamicField name="*_txt" type="text_general" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_en" type="text_en" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
<dynamicField name="*_bs" type="boolean" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_f" type="float" indexed="true" stored="true"/>
<dynamicField name="*_fs" type="float" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_d" type="double" indexed="true" stored="true"/>
<dynamicField name="*_ds" type="double" indexed="true" stored="true" multiValued="true"/>
<!-- Type used to index the lat and lon components for the "location" FieldType -->
<dynamicField name="*_coordinate" type="tdouble" indexed="true" stored="false" />
<dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
<dynamicField name="*_dts" type="date" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_p" type="location" indexed="true" stored="true"/>
<!-- some trie-coded dynamic fields for faster range queries -->
<dynamicField name="*_ti" type="tint" indexed="true" stored="true"/>
<dynamicField name="*_tl" type="tlong" indexed="true" stored="true"/>
<dynamicField name="*_tf" type="tfloat" indexed="true" stored="true"/>
<dynamicField name="*_td" type="tdouble" indexed="true" stored="true"/>
<dynamicField name="*_tdt" type="tdate" indexed="true" stored="true"/>
<dynamicField name="*_pi" type="pint" indexed="true" stored="true"/>
<dynamicField name="*_c" type="currency" indexed="true" stored="true"/>
<dynamicField name="ignored_*" type="ignored" multiValued="true"/>
<dynamicField name="attr_*" type="text_general" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="random_*" type="random" />
<!-- uncomment the following to ignore any fields that don't already match an existing
field name or dynamic field, rather than reporting them as an error.
alternately, change the type="ignored" to some other type e.g. "text" if you want
unknown fields indexed and/or stored by default -->
<!--dynamicField name="*" type="ignored" multiValued="true" /-->
</fields>
<!-- Field to use to determine and enforce document uniqueness.
Unless this field is marked with required="false", it will be a required field
-->
<uniqueKey>id</uniqueKey>
<!-- DEPRECATED: The defaultSearchField is consulted by various query parsers when
parsing a query string that isn't explicit about the field. Machine (non-user)
generated queries are best made explicit, or they can use the "df" request parameter
which takes precedence over this.
Note: Un-commenting defaultSearchField will be insufficient if your request handler
in solrconfig.xml defines "df", which takes precedence. That would need to be removed.
<defaultSearchField>text</defaultSearchField> -->
<!-- DEPRECATED: The defaultOperator (AND|OR) is consulted by various query parsers
when parsing a query string to determine if a clause of the query should be marked as
required or optional, assuming the clause isn't already marked by some operator.
The default is OR, which is generally assumed so it is not a good idea to change it
globally here. The "q.op" request parameter takes precedence over this.
<solrQueryParser defaultOperator="OR"/> -->
<!-- copyField commands copy one field to another at the time a document
is added to the index. It's used either to index the same field differently,
or to add multiple fields to the same field for easier/faster searching. -->
<copyField source="cat" dest="text"/>
<copyField source="name" dest="text"/>
<copyField source="manu" dest="text"/>
<copyField source="features" dest="text"/>
<copyField source="includes" dest="text"/>
<copyField source="manu" dest="manu_exact"/>
<!-- Copy the price into a currency enabled field (default USD) -->
<copyField source="price" dest="price_c"/>
<!-- Text fields from SolrCell to search by default in our catch-all field -->
<copyField source="title" dest="text"/>
<copyField source="author" dest="text"/>
<copyField source="description" dest="text"/>
<copyField source="keywords" dest="text"/>
<copyField source="content" dest="text"/>
<copyField source="content_type" dest="text"/>
<copyField source="resourcename" dest="text"/>
<copyField source="url" dest="text"/>
<!-- Create a string version of author for faceting -->
<copyField source="author" dest="author_s"/>
<!-- Above, multiple source fields are copied to the [text] field.
Another way to map multiple source fields to the same
destination field is to use the dynamic field syntax.
copyField also supports a maxChars to copy setting. -->
<!-- <copyField source="*_t" dest="text" maxChars="3000"/> -->
<!-- copy name to alphaNameSort, a field designed for sorting by name -->
<!-- <copyField source="name" dest="alphaNameSort"/> -->
<types>
<!-- field type definitions. The "name" attribute is
just a label to be used by field definitions. The "class"
attribute and any other attributes determine the real
behavior of the fieldType.
Class names starting with "solr" refer to java classes in a
standard package such as org.apache.solr.analysis
-->
<!-- The StrField type is not analyzed, but indexed/stored verbatim.
It supports doc values but in that case the field needs to be
single-valued and either required or have a default value.
-->
<fieldType name="string" class="solr.StrField" sortMissingLast="true" />
<!-- boolean type: "true" or "false" -->
<fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>
<!-- sortMissingLast and sortMissingFirst attributes are optional attributes are
currently supported on types that are sorted internally as strings
and on numeric types.
This includes "string","boolean", and, as of 3.5 (and 4.x),
int, float, long, date, double, including the "Trie" variants.
- If sortMissingLast="true", then a sort on this field will cause documents
without the field to come after documents with the field,
regardless of the requested sort order (asc or desc).
- If sortMissingFirst="true", then a sort on this field will cause documents
without the field to come before documents with the field,
regardless of the requested sort order.
- If sortMissingLast="false" and sortMissingFirst="false" (the default),
then default lucene sorting will be used which places docs without the
field first in an ascending sort and last in a descending sort.
-->
<!--
Default numeric field types. For faster range queries, consider the tint/tfloat/tlong/tdouble types.
These fields support doc values, but they require the field to be
single-valued and either be required or have a default value.
-->
<fieldType name="int" class="solr.TrieIntField" precisionStep="0" positionIncrementGap="0"/>
<fieldType name="float" class="solr.TrieFloatField" precisionStep="0" positionIncrementGap="0"/>
<fieldType name="long" class="solr.TrieLongField" precisionStep="0" positionIncrementGap="0"/>
<fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" positionIncrementGap="0"/>
<!--
Numeric field types that index each value at various levels of precision
to accelerate range queries when the number of values between the range
endpoints is large. See the javadoc for NumericRangeQuery for internal
implementation details.
Smaller precisionStep values (specified in bits) will lead to more tokens
indexed per value, slightly larger index size, and faster range queries.
A precisionStep of 0 disables indexing at different precision levels.
-->
<fieldType name="tint" class="solr.TrieIntField" precisionStep="8" positionIncrementGap="0"/>
<fieldType name="tfloat" class="solr.TrieFloatField" precisionStep="8" positionIncrementGap="0"/>
<fieldType name="tlong" class="solr.TrieLongField" precisionStep="8" positionIncrementGap="0"/>
<fieldType name="tdouble" class="solr.TrieDoubleField" precisionStep="8" positionIncrementGap="0"/>
<!-- The format for this date field is of the form 1995-12-31T23:59:59Z, and
is a more restricted form of the canonical representation of dateTime
http://www.w3.org/TR/xmlschema-2/#dateTime
The trailing "Z" designates UTC time and is mandatory.
Optional fractional seconds are allowed: 1995-12-31T23:59:59.999Z
All other components are mandatory.
Expressions can also be used to denote calculations that should be
performed relative to "NOW" to determine the value, ie...
NOW/HOUR
... Round to the start of the current hour
NOW-1DAY
... Exactly 1 day prior to now
NOW/DAY+6MONTHS+3DAYS
... 6 months and 3 days in the future from the start of
the current day
Consult the DateField javadocs for more information.
Note: For faster range queries, consider the tdate type
-->
<fieldType name="date" class="solr.TrieDateField" precisionStep="0" positionIncrementGap="0"/>
<!-- A Trie based date field for faster date range queries and date faceting. -->
<fieldType name="tdate" class="solr.TrieDateField" precisionStep="6" positionIncrementGap="0"/>
<!--Binary data type. The data should be sent/retrieved in as Base64 encoded Strings -->
<fieldtype name="binary" class="solr.BinaryField"/>
<!--
Note:
These should only be used for compatibility with existing indexes (created with lucene or older Solr versions).
Use Trie based fields instead. As of Solr 3.5 and 4.x, Trie based fields support sortMissingFirst/Last
Plain numeric field types that store and index the text
value verbatim (and hence don't correctly support range queries, since the
lexicographic ordering isn't equal to the numeric ordering)
-->
<fieldType name="pint" class="solr.IntField"/>
<fieldType name="plong" class="solr.LongField"/>
<fieldType name="pfloat" class="solr.FloatField"/>
<fieldType name="pdouble" class="solr.DoubleField"/>
<fieldType name="pdate" class="solr.DateField" sortMissingLast="true"/>
<!-- The "RandomSortField" is not used to store or search any
data. You can declare fields of this type it in your schema
to generate pseudo-random orderings of your docs for sorting
or function purposes. The ordering is generated based on the field
name and the version of the index. As long as the index version
remains unchanged, and the same field name is reused,
the ordering of the docs will be consistent.
If you want different psuedo-random orderings of documents,
for the same version of the index, use a dynamicField and
change the field name in the request.
-->
<fieldType name="random" class="solr.RandomSortField" indexed="true" />
<!-- solr.TextField allows the specification of custom text analyzers
specified as a tokenizer and a list of token filters. Different
analyzers may be specified for indexing and querying.
The optional positionIncrementGap puts space between multiple fields of
this type on the same document, with the purpose of preventing false phrase
matching across fields.
For more info on customizing your analyzer chain, please see
http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters
-->
<!-- One can also specify an existing Analyzer class that has a
default constructor via the class attribute on the analyzer element.
Example:
<fieldType name="text_greek" class="solr.TextField">
<analyzer class="org.apache.lucene.analysis.el.GreekAnalyzer"/>
</fieldType>
-->
<!-- A text field that only splits on whitespace for exact matching of words -->
<fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
</analyzer>
</fieldType>
<!-- A general text field that has reasonable, generic
cross-language defaults: it tokenizes with StandardTokenizer,
removes stop words from case-insensitive "stopwords.txt"
(empty by default), and down cases. At query time only, it
also applies synonyms. -->
<fieldType name="text_general" class="solr.TextField" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt" />
<!-- in this example, we will only use synonyms at query time
<filter class="solr.SynonymFilterFactory" synonyms="index_synonyms.txt" ignoreCase="true" expand="false"/>
-->
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt" />
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldType>
<!-- A text field with defaults appropriate for English: it
tokenizes with StandardTokenizer, removes English stop words
(lang/stopwords_en.txt), down cases, protects words from protwords.txt, and
finally applies Porter's stemming. The query time analyzer
also applies synonyms from synonyms.txt. -->
<fieldType name="text_en" class="solr.TextField" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.StandardTokenizerFactory"/>
<!-- in this example, we will only use synonyms at query time
<filter class="solr.SynonymFilterFactory" synonyms="index_synonyms.txt" ignoreCase="true" expand="false"/>
-->
<!-- Case insensitive stop word removal.
-->
<filter class="solr.StopFilterFactory"
ignoreCase="true"
words="lang/stopwords_en.txt"
/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.EnglishPossessiveFilterFactory"/>
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
<!-- Optionally you may want to use this less aggressive stemmer instead of PorterStemFilterFactory:
<filter class="solr.EnglishMinimalStemFilterFactory"/>
-->
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
<filter class="solr.StopFilterFactory"
ignoreCase="true"
words="lang/stopwords_en.txt"
/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.EnglishPossessiveFilterFactory"/>
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
<!-- Optionally you may want to use this less aggressive stemmer instead of PorterStemFilterFactory:
<filter class="solr.EnglishMinimalStemFilterFactory"/>
-->
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldType>
<!-- A text field with defaults appropriate for English, plus
aggressive word-splitting and autophrase features enabled.
This field is just like text_en, except it adds
WordDelimiterFilter to enable splitting and matching of
words on case-change, alpha numeric boundaries, and
non-alphanumeric chars. This means certain compound word
cases will work, for example query "wi fi" will match
document "WiFi" or "wi-fi".
-->
<fieldType name="text_en_splitting" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
<analyzer type="index">
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
<!-- in this example, we will only use synonyms at query time
<filter class="solr.SynonymFilterFactory" synonyms="index_synonyms.txt" ignoreCase="true" expand="false"/>
-->
<!-- Case insensitive stop word removal.
-->
<filter class="solr.StopFilterFactory"
ignoreCase="true"
words="lang/stopwords_en.txt"
/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0" splitOnCaseChange="1"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
<filter class="solr.StopFilterFactory"
ignoreCase="true"
words="lang/stopwords_en.txt"
/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="1"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldType>
<!-- Less flexible matching, but less false matches. Probably not ideal for product names,
but may be good for SKUs. Can insert dashes in the wrong place and still match. -->
<fieldType name="text_en_splitting_tight" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
<analyzer>
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="false"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_en.txt"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
<filter class="solr.EnglishMinimalStemFilterFactory"/>
<!-- this filter can remove any duplicate tokens that appear at the same position - sometimes
possible with WordDelimiterFilter in conjuncton with stemming. -->
<filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
</analyzer>
</fieldType>
<!-- Just like text_general except it reverses the characters of
each token, to enable more efficient leading wildcard queries. -->
<fieldType name="text_general_rev" class="solr.TextField" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt" />
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.ReversedWildcardFilterFactory" withOriginal="true"
maxPosAsterisk="3" maxPosQuestion="2" maxFractionAsterisk="0.33"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt" />
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldType>
<!-- charFilter + WhitespaceTokenizer -->
<!--
<fieldType name="text_char_norm" class="solr.TextField" positionIncrementGap="100" >
<analyzer>
<charFilter class="solr.MappingCharFilterFactory" mapping="mapping-ISOLatin1Accent.txt"/>
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
</analyzer>
</fieldType>
-->
<!-- This is an example of using the KeywordTokenizer along
With various TokenFilterFactories to produce a sortable field
that does not include some properties of the source text
-->
<fieldType name="alphaOnlySort" class="solr.TextField" sortMissingLast="true" omitNorms="true">
<analyzer>
<!-- KeywordTokenizer does no actual tokenizing, so the entire
input string is preserved as a single token
-->
<tokenizer class="solr.KeywordTokenizerFactory"/>
<!-- The LowerCase TokenFilter does what you expect, which can be
when you want your sorting to be case insensitive
-->
<filter class="solr.LowerCaseFilterFactory" />
<!-- The TrimFilter removes any leading or trailing whitespace -->
<filter class="solr.TrimFilterFactory" />
<!-- The PatternReplaceFilter gives you the flexibility to use
Java Regular expression to replace any sequence of characters
matching a pattern with an arbitrary replacement string,
which may include back references to portions of the original
string matched by the pattern.
See the Java Regular Expression documentation for more
information on pattern and replacement string syntax.
http://java.sun.com/j2se/1.6.0/docs/api/java/util/regex/package-summary.html
-->
<filter class="solr.PatternReplaceFilterFactory"
pattern="([^a-z])" replacement="" replace="all"
/>
</analyzer>
</fieldType>
<fieldtype name="phonetic" stored="false" indexed="true" class="solr.TextField" >
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.DoubleMetaphoneFilterFactory" inject="false"/>
</analyzer>
</fieldtype>
<fieldtype name="payloads" stored="false" indexed="true" class="solr.TextField" >
<analyzer>
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
<!--
The DelimitedPayloadTokenFilter can put payloads on tokens... for example,
a token of "foo|1.4" would be indexed as "foo" with a payload of 1.4f
Attributes of the DelimitedPayloadTokenFilterFactory :
"delimiter" - a one character delimiter. Default is | (pipe)
"encoder" - how to encode the following value into a playload
float -> org.apache.lucene.analysis.payloads.FloatEncoder,
integer -> o.a.l.a.p.IntegerEncoder
identity -> o.a.l.a.p.IdentityEncoder
Fully Qualified class name implementing PayloadEncoder, Encoder must have a no arg constructor.
-->
<filter class="solr.DelimitedPayloadTokenFilterFactory" encoder="float"/>
</analyzer>
</fieldtype>
<!-- lowercases the entire field value, keeping it as a single token. -->
<fieldType name="lowercase" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.KeywordTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory" />
</analyzer>
</fieldType>
<!--
Example of using PathHierarchyTokenizerFactory at index time, so
queries for paths match documents at that path, or in descendent paths
-->
<fieldType name="descendent_path" class="solr.TextField">
<analyzer type="index">
<tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
</analyzer>
<analyzer type="query">
<tokenizer class="solr.KeywordTokenizerFactory" />
</analyzer>
</fieldType>
<!--
Example of using PathHierarchyTokenizerFactory at query time, so
queries for paths match documents at that path, or in ancestor paths
-->
<fieldType name="ancestor_path" class="solr.TextField">
<analyzer type="index">
<tokenizer class="solr.KeywordTokenizerFactory" />
</analyzer>
<analyzer type="query">
<tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
</analyzer>
</fieldType>
<!-- since fields of this type are by default not stored or indexed,
any data added to them will be ignored outright. -->
<fieldtype name="ignored" stored="false" indexed="false" multiValued="true" class="solr.StrField" />
<!-- This point type indexes the coordinates as separate fields (subFields)
If subFieldType is defined, it references a type, and a dynamic field
definition is created matching *___<typename>. Alternately, if
subFieldSuffix is defined, that is used to create the subFields.
Example: if subFieldType="double", then the coordinates would be
indexed in fields myloc_0___double,myloc_1___double.
Example: if subFieldSuffix="_d" then the coordinates would be indexed
in fields myloc_0_d,myloc_1_d
The subFields are an implementation detail of the fieldType, and end
users normally should not need to know about them.
-->
<fieldType name="point" class="solr.PointType" dimension="2" subFieldSuffix="_d"/>
<!-- A specialized field for geospatial search. If indexed, this fieldType must not be multivalued. -->
<fieldType name="location" class="solr.LatLonType" subFieldSuffix="_coordinate"/>
<!-- An alternative geospatial field type new to Solr 4. It supports multiValued and polygon shapes.
For more information about this and other Spatial fields new to Solr 4, see:
http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
-->
<fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
geo="true" distErrPct="0.025" maxDistErr="0.000009" units="degrees" />
<!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
Parameters:
defaultCurrency: Specifies the default currency if none specified. Defaults to "USD"
precisionStep: Specifies the precisionStep for the TrieLong field used for the amount
providerClass: Lets you plug in other exchange provider backend:
solr.FileExchangeRateProvider is the default and takes one parameter:
currencyConfig: name of an xml file holding exchange rates
solr.OpenExchangeRatesOrgProvider uses rates from openexchangerates.org:
ratesFileLocation: URL or path to rates JSON file (default latest.json on the web)
refreshInterval: Number of minutes between each rates fetch (default: 1440, min: 60)
-->
<fieldType name="currency" class="solr.CurrencyField" precisionStep="8" defaultCurrency="USD" currencyConfig="currency.xml" />
<!-- some examples for different languages (generally ordered by ISO code) -->
<!-- Arabic -->
<fieldType name="text_ar" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<!-- for any non-arabic -->
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_ar.txt" />
<!-- normalizes ﻯ to ﻱ, etc -->
<filter class="solr.ArabicNormalizationFilterFactory"/>
<filter class="solr.ArabicStemFilterFactory"/>
</analyzer>
</fieldType>
<!-- Bulgarian -->
<fieldType name="text_bg" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_bg.txt" />
<filter class="solr.BulgarianStemFilterFactory"/>
</analyzer>
</fieldType>
<!-- Catalan -->
<fieldType name="text_ca" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<!-- removes l', etc -->
<filter class="solr.ElisionFilterFactory" ignoreCase="true" articles="lang/contractions_ca.txt"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_ca.txt" />
<filter class="solr.SnowballPorterFilterFactory" language="Catalan"/>
</analyzer>
</fieldType>
<!-- CJK bigram (see text_ja for a Japanese configuration using morphological analysis) -->
<fieldType name="text_cjk" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<!-- normalize width before bigram, as e.g. half-width dakuten combine -->
<filter class="solr.CJKWidthFilterFactory"/>
<!-- for any non-CJK -->
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.CJKBigramFilterFactory"/>
</analyzer>
</fieldType>
<!-- Czech -->
<fieldType name="text_cz" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_cz.txt" />
<filter class="solr.CzechStemFilterFactory"/>
</analyzer>
</fieldType>
<!-- Danish -->
<fieldType name="text_da" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_da.txt" format="snowball" />
<filter class="solr.SnowballPorterFilterFactory" language="Danish"/>
</analyzer>
</fieldType>
<!-- German -->
<fieldType name="text_de" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_de.txt" format="snowball" />
<filter class="solr.GermanNormalizationFilterFactory"/>
<filter class="solr.GermanLightStemFilterFactory"/>
<!-- less aggressive: <filter class="solr.GermanMinimalStemFilterFactory"/> -->
<!-- more aggressive: <filter class="solr.SnowballPorterFilterFactory" language="German2"/> -->
</analyzer>
</fieldType>
<!-- Greek -->
<fieldType name="text_el" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<!-- greek specific lowercase for sigma -->
<filter class="solr.GreekLowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="false" words="lang/stopwords_el.txt" />
<filter class="solr.GreekStemFilterFactory"/>
</analyzer>
</fieldType>
<!-- Spanish -->
<fieldType name="text_es" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_es.txt" format="snowball" />
<filter class="solr.SpanishLightStemFilterFactory"/>
<!-- more aggressive: <filter class="solr.SnowballPorterFilterFactory" language="Spanish"/> -->
</analyzer>
</fieldType>
<!-- Basque -->
<fieldType name="text_eu" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_eu.txt" />
<filter class="solr.SnowballPorterFilterFactory" language="Basque"/>
</analyzer>
</fieldType>
<!-- Persian -->
<fieldType name="text_fa" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<!-- for ZWNJ -->
<charFilter class="solr.PersianCharFilterFactory"/>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.ArabicNormalizationFilterFactory"/>
<filter class="solr.PersianNormalizationFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_fa.txt" />
</analyzer>
</fieldType>
<!-- Finnish -->
<fieldType name="text_fi" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_fi.txt" format="snowball" />
<filter class="solr.SnowballPorterFilterFactory" language="Finnish"/>
<!-- less aggressive: <filter class="solr.FinnishLightStemFilterFactory"/> -->
</analyzer>
</fieldType>
<!-- French -->
<fieldType name="text_fr" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<!-- removes l', etc -->
<filter class="solr.ElisionFilterFactory" ignoreCase="true" articles="lang/contractions_fr.txt"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_fr.txt" format="snowball" />
<filter class="solr.FrenchLightStemFilterFactory"/>
<!-- less aggressive: <filter class="solr.FrenchMinimalStemFilterFactory"/> -->
<!-- more aggressive: <filter class="solr.SnowballPorterFilterFactory" language="French"/> -->
</analyzer>
</fieldType>
<!-- Irish -->
<fieldType name="text_ga" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<!-- removes d', etc -->
<filter class="solr.ElisionFilterFactory" ignoreCase="true" articles="lang/contractions_ga.txt"/>
<!-- removes n-, etc. position increments is intentionally false! -->
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/hyphenations_ga.txt"/>
<filter class="solr.IrishLowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_ga.txt"/>
<filter class="solr.SnowballPorterFilterFactory" language="Irish"/>
</analyzer>
</fieldType>
<!-- Galician -->
<fieldType name="text_gl" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_gl.txt" />
<filter class="solr.GalicianStemFilterFactory"/>
<!-- less aggressive: <filter class="solr.GalicianMinimalStemFilterFactory"/> -->
</analyzer>
</fieldType>
<!-- Hindi -->
<fieldType name="text_hi" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<!-- normalizes unicode representation -->
<filter class="solr.IndicNormalizationFilterFactory"/>
<!-- normalizes variation in spelling -->
<filter class="solr.HindiNormalizationFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_hi.txt" />
<filter class="solr.HindiStemFilterFactory"/>
</analyzer>
</fieldType>
<!-- Hungarian -->
<fieldType name="text_hu" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_hu.txt" format="snowball" />
<filter class="solr.SnowballPorterFilterFactory" language="Hungarian"/>
<!-- less aggressive: <filter class="solr.HungarianLightStemFilterFactory"/> -->
</analyzer>
</fieldType>
<!-- Armenian -->
<fieldType name="text_hy" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_hy.txt" />
<filter class="solr.SnowballPorterFilterFactory" language="Armenian"/>
</analyzer>
</fieldType>
<!-- Indonesian -->
<fieldType name="text_id" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_id.txt" />
<!-- for a less aggressive approach (only inflectional suffixes), set stemDerivational to false -->
<filter class="solr.IndonesianStemFilterFactory" stemDerivational="true"/>
</analyzer>
</fieldType>
<!-- Italian -->
<fieldType name="text_it" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<!-- removes l', etc -->
<filter class="solr.ElisionFilterFactory" ignoreCase="true" articles="lang/contractions_it.txt"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_it.txt" format="snowball" />
<filter class="solr.ItalianLightStemFilterFactory"/>
<!-- more aggressive: <filter class="solr.SnowballPorterFilterFactory" language="Italian"/> -->
</analyzer>
</fieldType>
<!-- Japanese using morphological analysis (see text_cjk for a configuration using bigramming)
NOTE: If you want to optimize search for precision, use default operator AND in your query
parser config with <solrQueryParser defaultOperator="AND"/> further down in this file. Use
OR if you would like to optimize for recall (default).
-->
<fieldType name="text_ja" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="false">
<analyzer>
<!-- Kuromoji Japanese morphological analyzer/tokenizer (JapaneseTokenizer)
Kuromoji has a search mode (default) that does segmentation useful for search. A heuristic
is used to segment compounds into its parts and the compound itself is kept as synonym.
Valid values for attribute mode are:
normal: regular segmentation
search: segmentation useful for search with synonyms compounds (default)
extended: same as search mode, but unigrams unknown words (experimental)
For some applications it might be good to use search mode for indexing and normal mode for
queries to reduce recall and prevent parts of compounds from being matched and highlighted.
Use <analyzer type="index"> and <analyzer type="query"> for this and mode normal in query.
Kuromoji also has a convenient user dictionary feature that allows overriding the statistical
model with your own entries for segmentation, part-of-speech tags and readings without a need
to specify weights. Notice that user dictionaries have not been subject to extensive testing.
User dictionary attributes are:
userDictionary: user dictionary filename
userDictionaryEncoding: user dictionary encoding (default is UTF-8)
See lang/userdict_ja.txt for a sample user dictionary file.
Punctuation characters are discarded by default. Use discardPunctuation="false" to keep them.
See http://wiki.apache.org/solr/JapaneseLanguageSupport for more on Japanese language support.
-->
<tokenizer class="solr.JapaneseTokenizerFactory" mode="search"/>
<!--<tokenizer class="solr.JapaneseTokenizerFactory" mode="search" userDictionary="lang/userdict_ja.txt"/>-->
<!-- Reduces inflected verbs and adjectives to their base/dictionary forms (辞書形) -->
<filter class="solr.JapaneseBaseFormFilterFactory"/>
<!-- Removes tokens with certain part-of-speech tags -->
<filter class="solr.JapanesePartOfSpeechStopFilterFactory" tags="lang/stoptags_ja.txt" />
<!-- Normalizes full-width romaji to half-width and half-width kana to full-width (Unicode NFKC subset) -->
<filter class="solr.CJKWidthFilterFactory"/>
<!-- Removes common tokens typically not useful for search, but have a negative effect on ranking -->
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_ja.txt" />
<!-- Normalizes common katakana spelling variations by removing any last long sound character (U+30FC) -->
<filter class="solr.JapaneseKatakanaStemFilterFactory" minimumLength="4"/>
<!-- Lower-cases romaji characters -->
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldType>
<!-- Latvian -->
<fieldType name="text_lv" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_lv.txt" />
<filter class="solr.LatvianStemFilterFactory"/>
</analyzer>
</fieldType>
<!-- Dutch -->
<fieldType name="text_nl" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_nl.txt" format="snowball" />
<filter class="solr.StemmerOverrideFilterFactory" dictionary="lang/stemdict_nl.txt" ignoreCase="false"/>
<filter class="solr.SnowballPorterFilterFactory" language="Dutch"/>
</analyzer>
</fieldType>
<!-- Norwegian -->
<fieldType name="text_no" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_no.txt" format="snowball" />
<filter class="solr.SnowballPorterFilterFactory" language="Norwegian"/>
<!-- less aggressive: <filter class="solr.NorwegianLightStemFilterFactory" variant="nb"/> -->
<!-- singular/plural: <filter class="solr.NorwegianMinimalStemFilterFactory" variant="nb"/> -->
<!-- The "light" and "minimal" stemmers support variants: nb=Bokmål, nn=Nynorsk, no=Both -->
</analyzer>
</fieldType>
<!-- Portuguese -->
<fieldType name="text_pt" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_pt.txt" format="snowball" />
<filter class="solr.PortugueseLightStemFilterFactory"/>
<!-- less aggressive: <filter class="solr.PortugueseMinimalStemFilterFactory"/> -->
<!-- more aggressive: <filter class="solr.SnowballPorterFilterFactory" language="Portuguese"/> -->
<!-- most aggressive: <filter class="solr.PortugueseStemFilterFactory"/> -->
</analyzer>
</fieldType>
<!-- Romanian -->
<fieldType name="text_ro" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_ro.txt" />
<filter class="solr.SnowballPorterFilterFactory" language="Romanian"/>
</analyzer>
</fieldType>
<!-- Russian -->
<fieldType name="text_ru" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_ru.txt" format="snowball" />
<filter class="solr.SnowballPorterFilterFactory" language="Russian"/>
<!-- less aggressive: <filter class="solr.RussianLightStemFilterFactory"/> -->
</analyzer>
</fieldType>
<!-- Swedish -->
<fieldType name="text_sv" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_sv.txt" format="snowball" />
<filter class="solr.SnowballPorterFilterFactory" language="Swedish"/>
<!-- less aggressive: <filter class="solr.SwedishLightStemFilterFactory"/> -->
</analyzer>
</fieldType>
<!-- Thai -->
<fieldType name="text_th" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.ThaiWordFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_th.txt" />
</analyzer>
</fieldType>
<!-- Turkish -->
<fieldType name="text_tr" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.TurkishLowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="false" words="lang/stopwords_tr.txt" />
<filter class="solr.SnowballPorterFilterFactory" language="Turkish"/>
</analyzer>
</fieldType>
</types>
<!-- Similarity is the scoring routine for each document vs. a query.
A custom Similarity or SimilarityFactory may be specified here, but
the default is fine for most applications.
For more info: http://wiki.apache.org/solr/SchemaXml#Similarity
-->
<!--
<similarity class="com.example.solr.CustomSimilarityFactory">
<str name="paramkey">param value</str>
</similarity>
-->
</schema>
"""
| 126.152461
| 96,564
| 0.722672
| 23,378
| 210,170
| 6.43156
| 0.093122
| 0.038548
| 0.068357
| 0.054151
| 0.793804
| 0.779644
| 0.753879
| 0.734066
| 0.717838
| 0.703552
| 0.000114
| 0.024681
| 0.08505
| 210,170
| 1,665
| 96,565
| 126.228228
| 0.757102
| 0.009706
| 0
| 0.36422
| 0
| 0.059872
| 0.924364
| 0.628154
| 0
| 0
| 0
| 0.000601
| 0.052031
| 1
| 0.014968
| false
| 0.000713
| 0.00784
| 0.000713
| 0.032787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7cf6b39db3b711cf6a72da02cd3d64e1fe916614
| 276
|
py
|
Python
|
jsondler/json_tools/__init__.py
|
loven-doo/jsondler
|
1466f022fc059a5a58e34dcc3ebddb76b5f82c5c
|
[
"BSD-3-Clause"
] | null | null | null |
jsondler/json_tools/__init__.py
|
loven-doo/jsondler
|
1466f022fc059a5a58e34dcc3ebddb76b5f82c5c
|
[
"BSD-3-Clause"
] | null | null | null |
jsondler/json_tools/__init__.py
|
loven-doo/jsondler
|
1466f022fc059a5a58e34dcc3ebddb76b5f82c5c
|
[
"BSD-3-Clause"
] | null | null | null |
from jsondler.json_tools.requests import get_by_path, deepupdate, deepdiff, query, where
from jsondler.json_tools.index import index_dicts_list
from jsondler.json_tools.restruct import columnize_dicts_list
from jsondler.json_tools.sort import sort_dicts_list, get_paths_order
| 55.2
| 88
| 0.876812
| 43
| 276
| 5.302326
| 0.488372
| 0.210526
| 0.280702
| 0.368421
| 0.263158
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076087
| 276
| 4
| 89
| 69
| 0.894118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7cfb960110c41345cf6de27aa3b8c945bfe271a5
| 4,744
|
py
|
Python
|
nc/migrations/0030_auto_20180605_0032.py
|
kfarrelly/nucleo
|
096fa9fdfac39383269904f5d58b3714ce2d2f46
|
[
"Apache-2.0"
] | 1
|
2022-01-10T23:35:53.000Z
|
2022-01-10T23:35:53.000Z
|
nc/migrations/0030_auto_20180605_0032.py
|
kfarrelly/nucleo
|
096fa9fdfac39383269904f5d58b3714ce2d2f46
|
[
"Apache-2.0"
] | null | null | null |
nc/migrations/0030_auto_20180605_0032.py
|
kfarrelly/nucleo
|
096fa9fdfac39383269904f5d58b3714ce2d2f46
|
[
"Apache-2.0"
] | 1
|
2022-01-10T23:36:32.000Z
|
2022-01-10T23:36:32.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-06-05 00:32
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
import functools
import nc.models
import nc.validators
class Migration(migrations.Migration):
dependencies = [
('nc', '0029_auto_20180604_2327'),
]
operations = [
migrations.AddField(
model_name='asset',
name='whitepaper',
field=models.FileField(blank=True, default=None, null=True, upload_to=functools.partial(nc.models.model_file_directory_path, *(), **{b'field': 'whitepaper'}), validators=[django.core.validators.FileExtensionValidator(allowed_extensions=[b'pdf']), nc.validators.FileSizeValidator(limit_value=10485760), nc.validators.MimeTypeValidator(allowed_mimetypes=[b'application/pdf'])]),
),
migrations.AlterField(
model_name='account',
name='pic',
field=models.ImageField(blank=True, default=None, null=True, upload_to=functools.partial(nc.models.model_file_directory_path, *(), **{b'field': 'pic'}), validators=[nc.validators.FileSizeValidator(limit_value=10485760), nc.validators.MimeTypeValidator(allowed_mimetypes=[b'image/x-cmu-raster', b'image/x-xbitmap', b'image/gif', b'image/x-portable-bitmap', b'image/jpeg', b'application/x-hdf', b'application/postscript', b'image/png', b'image/vnd.microsoft.icon', b'image/x-rgb', b'video/mpeg', b'image/x-ms-bmp', b'image/x-xpixmap', b'image/x-portable-graymap', b'image/x-portable-pixmap', b'image/tiff', b'application/pdf'])], verbose_name='Account photo'),
),
migrations.AlterField(
model_name='asset',
name='cover',
field=models.ImageField(blank=True, default=None, null=True, upload_to=functools.partial(nc.models.model_file_directory_path, *(), **{b'field': 'cover'}), validators=[nc.validators.FileSizeValidator(limit_value=10485760), nc.validators.MimeTypeValidator(allowed_mimetypes=[b'image/x-cmu-raster', b'image/x-xbitmap', b'image/gif', b'image/x-portable-bitmap', b'image/jpeg', b'application/x-hdf', b'application/postscript', b'image/png', b'image/vnd.microsoft.icon', b'image/x-rgb', b'video/mpeg', b'image/x-ms-bmp', b'image/x-xpixmap', b'image/x-portable-graymap', b'image/x-portable-pixmap', b'image/tiff', b'application/pdf'])], verbose_name='Asset profile cover photo'),
),
migrations.AlterField(
model_name='asset',
name='pic',
field=models.ImageField(blank=True, default=None, null=True, upload_to=functools.partial(nc.models.model_file_directory_path, *(), **{b'field': 'pic'}), validators=[nc.validators.FileSizeValidator(limit_value=10485760), nc.validators.MimeTypeValidator(allowed_mimetypes=[b'image/x-cmu-raster', b'image/x-xbitmap', b'image/gif', b'image/x-portable-bitmap', b'image/jpeg', b'application/x-hdf', b'application/postscript', b'image/png', b'image/vnd.microsoft.icon', b'image/x-rgb', b'video/mpeg', b'image/x-ms-bmp', b'image/x-xpixmap', b'image/x-portable-graymap', b'image/x-portable-pixmap', b'image/tiff', b'application/pdf'])], verbose_name='Asset photo'),
),
migrations.AlterField(
model_name='profile',
name='cover',
field=models.ImageField(blank=True, default=None, null=True, upload_to=functools.partial(nc.models.profile_file_directory_path, *(), **{b'field': 'cover'}), validators=[nc.validators.FileSizeValidator(limit_value=10485760), nc.validators.MimeTypeValidator(allowed_mimetypes=[b'image/x-cmu-raster', b'image/x-xbitmap', b'image/gif', b'image/x-portable-bitmap', b'image/jpeg', b'application/x-hdf', b'application/postscript', b'image/png', b'image/vnd.microsoft.icon', b'image/x-rgb', b'video/mpeg', b'image/x-ms-bmp', b'image/x-xpixmap', b'image/x-portable-graymap', b'image/x-portable-pixmap', b'image/tiff', b'application/pdf'])], verbose_name='Cover photo'),
),
migrations.AlterField(
model_name='profile',
name='pic',
field=models.ImageField(blank=True, default=None, null=True, upload_to=functools.partial(nc.models.profile_file_directory_path, *(), **{b'field': 'pic'}), validators=[nc.validators.FileSizeValidator(limit_value=10485760), nc.validators.MimeTypeValidator(allowed_mimetypes=[b'image/x-cmu-raster', b'image/x-xbitmap', b'image/gif', b'image/x-portable-bitmap', b'image/jpeg', b'application/x-hdf', b'application/postscript', b'image/png', b'image/vnd.microsoft.icon', b'image/x-rgb', b'video/mpeg', b'image/x-ms-bmp', b'image/x-xpixmap', b'image/x-portable-graymap', b'image/x-portable-pixmap', b'image/tiff', b'application/pdf'])], verbose_name='Profile picture'),
),
]
| 94.88
| 684
| 0.704469
| 663
| 4,744
| 4.956259
| 0.147813
| 0.118685
| 0.08521
| 0.068472
| 0.851491
| 0.851491
| 0.848448
| 0.799757
| 0.799757
| 0.799757
| 0
| 0.01957
| 0.116779
| 4,744
| 49
| 685
| 96.816327
| 0.764678
| 0.014545
| 0
| 0.5
| 1
| 0
| 0.339683
| 0.129067
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b2df2a898ddeee3fd75e883cf4e4aa3bca387df
| 130
|
py
|
Python
|
s3imageservice/celery.py
|
The-Politico/django-s3-image-service
|
da8afd36b82c031ed09d881e7b3de013061e9ccd
|
[
"MIT"
] | 7
|
2018-07-23T16:35:07.000Z
|
2021-12-22T16:42:08.000Z
|
s3imageservice/celery.py
|
The-Politico/django-s3-image-service
|
da8afd36b82c031ed09d881e7b3de013061e9ccd
|
[
"MIT"
] | 13
|
2019-01-07T22:42:30.000Z
|
2022-02-27T10:53:02.000Z
|
s3imageservice/celery.py
|
The-Politico/django-s3-image-service
|
da8afd36b82c031ed09d881e7b3de013061e9ccd
|
[
"MIT"
] | 2
|
2020-07-27T02:25:59.000Z
|
2020-10-17T20:54:22.000Z
|
# flake8: noqa
from s3imageservice.tasks.aws import publish_to_aws
from s3imageservice.tasks.process_images import process_images
| 32.5
| 62
| 0.869231
| 18
| 130
| 6.055556
| 0.611111
| 0.330275
| 0.422018
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02521
| 0.084615
| 130
| 3
| 63
| 43.333333
| 0.890756
| 0.092308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6b3904e6e83f23552760c2fc7f92020174c52e3f
| 146,908
|
py
|
Python
|
tests/api_data/dsm_6/surveillance_station/const_6_api_info.py
|
synodriver/synologydsm-api
|
cb086f53dd3f4f8eceb4ab4dbc07bc8888df2b17
|
[
"MIT"
] | 37
|
2020-02-10T19:10:40.000Z
|
2022-03-04T17:17:41.000Z
|
tests/api_data/dsm_6/surveillance_station/const_6_api_info.py
|
synodriver/synologydsm-api
|
cb086f53dd3f4f8eceb4ab4dbc07bc8888df2b17
|
[
"MIT"
] | 161
|
2020-12-22T06:47:36.000Z
|
2022-03-28T21:09:10.000Z
|
tests/api_data/dsm_6/surveillance_station/const_6_api_info.py
|
mib1185/python-synology
|
dc721537b981d97c1e53ee8a4f20f9536805c360
|
[
"MIT"
] | 19
|
2020-04-02T09:23:08.000Z
|
2021-11-29T13:11:58.000Z
|
"""DSM 6 SYNO.API.Info data with surveillance surppot."""
DSM_6_API_INFO = {
"data": {
"SYNO.API.Auth": {"maxVersion": 6, "minVersion": 1, "path": "auth.cgi"},
"SYNO.API.Encryption": {
"maxVersion": 1,
"minVersion": 1,
"path": "encryption.cgi",
},
"SYNO.API.Info": {"maxVersion": 1, "minVersion": 1, "path": "query.cgi"},
"SYNO.API.OTP": {"maxVersion": 1, "minVersion": 1, "path": "otp.cgi"},
"SYNO.AntiVirus.Config": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AntiVirus.FileExt": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AntiVirus.General": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AntiVirus.Log": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AntiVirus.Purchase": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AntiVirus.Quarantine": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AntiVirus.Scan": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AntiVirus.Schedule": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AntiVirus.WhiteList": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AudioPlayer": {
"maxVersion": 2,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AudioPlayer.Stream": {
"maxVersion": 2,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AudioStation.Album": {
"maxVersion": 3,
"minVersion": 1,
"path": "AudioStation/album.cgi",
},
"SYNO.AudioStation.Artist": {
"maxVersion": 4,
"minVersion": 1,
"path": "AudioStation/artist.cgi",
},
"SYNO.AudioStation.Browse.Playlist": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AudioStation.Composer": {
"maxVersion": 2,
"minVersion": 1,
"path": "AudioStation/composer.cgi",
},
"SYNO.AudioStation.Cover": {
"maxVersion": 3,
"minVersion": 1,
"path": "AudioStation/cover.cgi",
},
"SYNO.AudioStation.Download": {
"maxVersion": 1,
"minVersion": 1,
"path": "AudioStation/download.cgi",
},
"SYNO.AudioStation.Folder": {
"maxVersion": 3,
"minVersion": 1,
"path": "AudioStation/folder.cgi",
},
"SYNO.AudioStation.Genre": {
"maxVersion": 3,
"minVersion": 1,
"path": "AudioStation/genre.cgi",
},
"SYNO.AudioStation.Info": {
"maxVersion": 4,
"minVersion": 1,
"path": "AudioStation/info.cgi",
},
"SYNO.AudioStation.Lyrics": {
"maxVersion": 2,
"minVersion": 1,
"path": "AudioStation/lyrics.cgi",
},
"SYNO.AudioStation.LyricsSearch": {
"maxVersion": 2,
"minVersion": 1,
"path": "AudioStation/lyrics_search.cgi",
},
"SYNO.AudioStation.MediaServer": {
"maxVersion": 1,
"minVersion": 1,
"path": "AudioStation/media_server.cgi",
},
"SYNO.AudioStation.Pin": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AudioStation.Playlist": {
"maxVersion": 3,
"minVersion": 1,
"path": "AudioStation/playlist.cgi",
},
"SYNO.AudioStation.Proxy": {
"maxVersion": 1,
"minVersion": 1,
"path": "AudioStation/proxy.cgi",
},
"SYNO.AudioStation.Radio": {
"maxVersion": 2,
"minVersion": 1,
"path": "AudioStation/radio.cgi",
},
"SYNO.AudioStation.RemotePlayer": {
"maxVersion": 3,
"minVersion": 1,
"path": "AudioStation/remote_player.cgi",
},
"SYNO.AudioStation.RemotePlayerStatus": {
"maxVersion": 1,
"minVersion": 1,
"path": "AudioStation/remote_player_status.cgi",
},
"SYNO.AudioStation.Search": {
"maxVersion": 1,
"minVersion": 1,
"path": "AudioStation/search.cgi",
},
"SYNO.AudioStation.Song": {
"maxVersion": 3,
"minVersion": 1,
"path": "AudioStation/song.cgi",
},
"SYNO.AudioStation.Stream": {
"maxVersion": 2,
"minVersion": 1,
"path": "AudioStation/stream.cgi",
},
"SYNO.AudioStation.Tag": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AudioStation.VoiceAssistant.Browse": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AudioStation.VoiceAssistant.Challenge": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AudioStation.VoiceAssistant.Info": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AudioStation.VoiceAssistant.Stream": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.AudioStation.WebPlayer": {
"maxVersion": 1,
"minVersion": 1,
"path": "AudioStation/web_player.cgi",
},
"SYNO.Backup.App": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.App.Backup": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.App.Restore": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.App2.Backup": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.App2.Restore": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Config.Backup": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Config.Restore": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Lunbackup": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Repository": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Repository.Certificate": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Restore": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Server": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Service.NetworkBackup": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Service.TimeBackup": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Share.Restore": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Source.Folder": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.AmazonCloudDrive.Container": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.Azure.Container": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.Connect.Network": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.Dropbox.Container": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.GoogleDrive.Container": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.HiDrive.Container": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.OpenStack.Container": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.OpenStack.Region": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.S3.Bucket": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.Share.Local": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.Share.Network": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.Share.Rsync": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.WebDAV.Container": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Storage.hubiC.Container": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Target": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Target.Config": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Task": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Version": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Backup.Version.History": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.AuthForeign": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.Cal": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.Chatbot": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.Event": {
"maxVersion": 4,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.InviteMail": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.InviteMailInit": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.Proxy": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.SendMail": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.Setting": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.Share.Priv": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.Sharing": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.SyncUser": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.Timezone": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Cal.Todo": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.CloudSync": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ACL": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.AppNotify": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.AppPortal": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.AppPortal.AccessControl": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.AppPortal.Config": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.AppPortal.ReverseProxy": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.AppPriv": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.AppPriv.App": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.AppPriv.Rule": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.BandwidthControl": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.BandwidthControl.Protocol": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.BandwidthControl.Status": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.CMS": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.CMS.Cache": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.CMS.Info": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.CMS.Policy": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.CMS.ServerInfo": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.CMS.Token": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Certificate": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Certificate.CRT": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Certificate.CSR": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Certificate.LetsEncrypt": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Certificate.LetsEncrypt.Account": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Certificate.Service": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.CurrentConnection": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.DDNS.ExtIP": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.DDNS.Provider": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.DDNS.Record": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.DDNS.Synology": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.DDNS.TWNIC": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.DSMNotify": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.DataCollect": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.DataCollect.Application": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Desktop.Defs": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Desktop.Initdata": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Desktop.JSUIString": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Desktop.SessionData": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Desktop.Timeout": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Desktop.UIString": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.Azure.SSO": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.Domain": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.Domain.ADHealthCheck": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.Domain.Conf": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.Domain.Schedule": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.LDAP": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.LDAP.BaseDN": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.LDAP.Login.Notify": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.LDAP.Profile": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.SSO": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.SSO.Profile": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.SSO.utils": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Directory.WebSphere.SSO": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.EventScheduler": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Bluetooth": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Bluetooth.Device": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Bluetooth.Settings": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.DefaultPermission": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Printer": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Printer.BonjourSharing": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Printer.Driver": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Printer.Network": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Printer.Network.Host": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Printer.OAuth": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Printer.USB": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Storage.EUnit": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Storage.Setting": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Storage.USB": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.Storage.eSATA": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ExternalDevice.UPS": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.EzInternet": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Factory.Config": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Factory.Manutild": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.File": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.File.Thumbnail": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.AFP": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.FTP": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.FTP.ChrootUser": {
"maxVersion": 2,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.FTP.SFTP": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.FTP.Security": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.NFS": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.NFS.AdvancedSetting": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.NFS.IDMap": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.NFS.Kerberos": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.NFS.SharePrivilege": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.ReflinkCopy": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.Rsync.Account": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.SMB": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.ServiceDiscovery": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.FileServ.ServiceDiscovery.WSTransfer": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Findhost": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Group": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Group.ExtraAdmin": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Group.Member": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Group.ValidLocalAdmin": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.GroupSettings": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.BeepControl": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.DCOutput": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.DCOutput.Task": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.FanSpeed": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.Hibernation": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.LCM": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.Led.Brightness": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.MemoryLayout": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.NeedReboot": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.PowerRecovery": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.PowerSchedule": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.RemoteFanStatus": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.SpectreMeltdown": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.VideoTranscoding": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Hardware.ZRAM": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Help": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ISCSI.LUN": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ISCSI.Lunbkp": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ISCSI.Node": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ISCSI.Replication": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ISCSI.Target": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.ISCSI.VLUN": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.MediaIndexing": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.MediaIndexing.IndexFolder": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.MediaIndexing.MediaConverter": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.MediaIndexing.MobileEnabled": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.MediaIndexing.ThumbnailQuality": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.MyDSCenter": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.MyDSCenter.Account": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.MyDSCenter.Purchase": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Authentication": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Authentication.Cert": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Bond": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Bridge": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.DHCPServer": {
"maxVersion": 4,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.DHCPServer.ClientList": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.DHCPServer.PXE": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.DHCPServer.Reservation": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.DHCPServer.Vendor": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.DHCPServer.WPAD": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Ethernet": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.IPv6": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.IPv6.Router": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.IPv6.Router.Prefix": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.IPv6Tunnel": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Interface": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.LocalBridge": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.MACClone": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.OVS": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.PPPoE": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.PPPoE.Relay": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Proxy": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Router.ConnectionList": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Router.CountryCode": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Router.DMZ": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Router.Gateway.List": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Router.LocalLan": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Router.MacFilter": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Router.ParentalControl": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Router.PkgList": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Router.PortForward": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Router.Static.Route": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Router.Topology": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.TrafficControl.RouterRules": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.TrafficControl.Rules": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.UPnPServer": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.USBModem": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.VPN": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.VPN.L2TP": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.VPN.OpenVPN": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.VPN.OpenVPN.CA": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.VPN.OpenVPNWithConf": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.VPN.OpenVPNWithConf.Certs": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.VPN.PPTP": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.WOL": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Wifi.Client": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Wifi.Hotspot": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Network.Wifi.WPS": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.NormalUser": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.NormalUser.LoginNotify": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.Advance.CustomizedData": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.Advance.FilterSettings": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.Advance.Variables": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.Advance.WarningPercentage": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.CMS": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.CMS.Conf": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.Mail": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.Mail.Auth": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.Mail.Conf": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.Push": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.Push.AuthToken": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.Push.Conf": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.Push.Mail": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.Push.Mobile": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.SMS": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.SMS.Conf": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Notification.SMS.Provider": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.OAuth.Scope": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.OAuth.Server": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.OTP": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.OTP.Admin": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.OTP.EnforcePolicy": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.OTP.Mail": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Account": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Control": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.FakeIFrame": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Feed": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Feed.Keyring": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Info": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Installation": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Installation.Download": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Log": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.MyDS": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.MyDS.Purchase": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Screenshot": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Screenshot.Server": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Server": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Setting": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Setting.Update": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Setting.Volume": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Term": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Thumb": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Package.Uninstallation": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PersonalNotification.Device": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PersonalNotification.Event": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PersonalNotification.Filter": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PersonalNotification.Settings": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PersonalNotification.android": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PersonalNotification.iOS": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PersonalNotification.windows": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PersonalSettings": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PhotoViewer": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Polling.Data": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PortForwarding": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PortForwarding.Compatibility": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PortForwarding.RouterConf": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PortForwarding.RouterInfo": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PortForwarding.RouterList": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PortForwarding.Rules": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PortForwarding.Rules.Serv": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.PortForwarding.UserDataCollector": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.QuickConnect": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.QuickConnect.Permission": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.QuickConnect.Upnp": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.QuickStart.Info": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.QuickStart.Install": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Quota": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.RecycleBin": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.RecycleBin.User": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Region.Language": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Region.NTP": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Region.NTP.DateTimeFormat": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Region.NTP.Server": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SNMP": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.AutoBlock": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.AutoBlock.Rules": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.DSM": {
"maxVersion": 4,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.DSM.Embed": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.DSM.Proxy": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.DoS": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.Firewall": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.Firewall.Adapter": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.Firewall.Conf": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.Firewall.Geoip": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.Firewall.Profile": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.Firewall.Profile.Apply": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.Firewall.Rules": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.Firewall.Rules.Serv": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.VPNPassthrough": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Security.VPNPassthrough.Status": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SecurityScan.Conf": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SecurityScan.Operation": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SecurityScan.Status": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Service": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Service.Conf": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Service.PortInfo": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Share": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Share.Crypto": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Share.Crypto.Key": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Share.CryptoFile": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Share.KeyManager.AutoKey": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Share.KeyManager.Key": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Share.KeyManager.MachineKey": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Share.KeyManager.Store": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Share.Migration": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Share.Migration.Task": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Share.Permission": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Share.Snapshot": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Sharing": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Sharing.Initdata": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Sharing.Login": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Sharing.Session": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SmartBlock": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SmartBlock.Device": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SmartBlock.Trusted": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SmartBlock.Untrusted": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SmartBlock.User": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Storage.Disk": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Storage.Pool": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Storage.Volume": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Storage.iSCSILUN": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Storage.iSCSITargets": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Storage.iSCSIUtils": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SupportForm.Form": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SupportForm.Log": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SupportForm.Service": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Synohdpack": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SyslogClient.FileTransfer": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SyslogClient.Log": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SyslogClient.PersonalActivity": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SyslogClient.Setting.Notify": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.SyslogClient.Status": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.System": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.System.Process": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.System.ProcessGroup": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.System.ResetButton": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.System.Status": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.System.Utilization": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.TFTP": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.TaskScheduler": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Terminal": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Theme.AppPortalLogin": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Theme.Desktop": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Theme.FileSharingLogin": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Theme.Image": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Theme.Login": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.TrustDevice": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Tuned": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.UISearch": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Upgrade": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Upgrade.AutoUpgrade": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Upgrade.Group": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Upgrade.Group.Download": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Upgrade.Group.Setting": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Upgrade.GroupInstall": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Upgrade.GroupInstall.Network": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Upgrade.Patch": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Upgrade.PreCheck": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Upgrade.Server": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Upgrade.Server.Download": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Upgrade.Setting": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.User": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.User.Group": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.User.Home": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.User.PasswordConfirm": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.User.PasswordExpiry": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.User.PasswordMeter": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.User.PasswordPolicy": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.UserSettings": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Virtualization.Host.Capability": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Web.DSM": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Web.DSM.External": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Web.Security.HTTPCompression": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Core.Web.Security.TLSProfile": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.DR.Node": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.DR.Node.Credential": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.DR.Node.Session": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.DSM.FindMe": {
"maxVersion": 2,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.DSM.Info": {
"maxVersion": 2,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.DSM.Network": {
"maxVersion": 2,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.DSM.PortEnable": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.DSM.PushNotification": {
"maxVersion": 2,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.DisasterRecovery.Log": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.DisasterRecovery.Retention": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Entry.Request": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Entry.Request.Polling": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.BackgroundTask": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.CheckExist": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.CheckPermission": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Compress": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.CopyMove": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.CreateFolder": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Delete": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.DirSize": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Download": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.External.GoogleDrive": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Extract": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Favorite": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.FormUpload": {
"maxVersion": 2,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Info": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.List": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.MD5": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Mount": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Mount.List": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Notify": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Property": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Property.ACLOwner": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Property.CompressSize": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Property.Mtime": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Rename": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Search": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Search.History": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Settings": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Sharing": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Sharing.Download": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Snapshot": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Thumb": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Timeout": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.UIString": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.Upload": {
"maxVersion": 3,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.UserGrp": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.VFS.Connection": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.VFS.File": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.VFS.GDrive": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.VFS.Profile": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.VFS.Protocol": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.VFS.User": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FileStation.VirtualFolder": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.AppIndexing.Search": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.Bookmark": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.Elastic.SearchHistory": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.Elastic.Spotlight": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.Elastic.Term": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.File": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.File.Cover": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.File.Thumbnail": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.FileIndexing.Folder": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.FileIndexing.Highlight": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.FileIndexing.Indicate": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.FileIndexing.Search": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.FileIndexing.Status": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.FileIndexing.Term": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.Preference": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.Settings": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Finder.UserGrp": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FolderSharing.Download": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FolderSharing.List": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.FolderSharing.Thumb": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.License.HA": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.OAUTH.Client": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.OAUTH.Common": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.OAUTH.Log": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.OAUTH.Token": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Package": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PersonMailAccount": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PersonMailAccount.Contacts": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PersonMailAccount.Mail": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Application.Info": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.MailAccount": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.MailAccount.Contacts": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.MailAccount.Mail": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Notification.Conf": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Notification.Device": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Notification.Event": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Notification.Filter": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Notification.GDPR": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Notification.Identifier": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Notification.Mobile": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Notification.Settings": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Notification.Token": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Notification.VapidPublicKey": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Profile": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Personal.Profile.Photo": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Browse.Album": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Browse.Category": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Browse.Concept": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Browse.Diff": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Browse.Folder": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Browse.GeneralTag": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Browse.Geocoding": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Browse.Item": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Browse.Person": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Browse.RecentlyAdded": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Browse.Timeline": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Browse.Unit": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Discover.Category": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Discover.Similar": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Discover.Status": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Discover.Style": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Download": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Enhancement": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Index": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Search": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Setting.Admin": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Setting.Mobile": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Setting.User": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Sharing": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.SharingLogin": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Streaming": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Thumbnail": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Photo.Upload.Item": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Browse.Album": {
"maxVersion": 2,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Browse.Category": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Browse.Concept": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Browse.Diff": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Browse.Folder": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Browse.GeneralTag": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Browse.Geocoding": {
"maxVersion": 2,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Browse.Item": {
"maxVersion": 3,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Browse.Person": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Browse.RecentlyAdded": {
"maxVersion": 3,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Browse.Timeline": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Browse.Unit": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Discover.Category": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Discover.Similar": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Discover.Status": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Discover.Style": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Download": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Enhancement": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Index": {
"maxVersion": 2,
"minVersion": 2,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Permission": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Search": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Setting.User": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Sharing": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Streaming": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Thumbnail": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.PhotoTeam.Upload.Item": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.ResourceMonitor.EventRule": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.ResourceMonitor.Log": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.ResourceMonitor.Setting": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.S2S.Client": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.S2S.Client.Job": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.S2S.Server": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.S2S.Server.Pair": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SAS.APIRunner": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SAS.APIRunner.Chatbot": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SAS.Encryption": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SAS.Group": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SAS.Group.Members": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SAS.Guest": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SDS.Backup.Client.Common.Log": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SDS.Backup.Client.Common.Statistic": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SDS.Backup.Client.Common.Target": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SDS.Backup.Client.Common.Version": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SDS.Backup.Client.Explore.File": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SDS.Backup.Client.Explore.Folder": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SDS.Backup.Client.Explore.Job": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SDS.Backup.Client.Explore.Target": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SDS.Backup.Client.Explore.Version": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SDS.Backup.Client.Fuse.Target": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SecurityAdvisor.Conf": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SecurityAdvisor.Conf.Checklist": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SecurityAdvisor.Conf.Checklist.Alert": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SecurityAdvisor.Conf.Location": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SecurityAdvisor.LoginActivity": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SecurityAdvisor.Report": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SecurityAdvisor.Report.HTML": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.ShareLink.Action": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.ShareLink.Download": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.ShareLink.Manage": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Snap.Usage.Share": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.Check": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.DualEnclosure": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.Enclosure": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.Flashcache": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.HddMan": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.Pool": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.Smart": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.Smart.Scheduler": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.Spare": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.Spare.Conf": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.Storage": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.TaipeiEnclosure": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Storage.CGI.Volume": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.ActionRule": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.AddOns": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Alert": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Alert.Setting": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Analytics.Setting": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.AppCenter": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Archiving.Pull": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Archiving.Push": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.AudioOut": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.AudioPattern": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.AudioStream": {
"maxVersion": 2,
"minVersion": 1,
"path": "SurveillanceStation/audioStreaming.cgi",
},
"SYNO.SurveillanceStation.AxisAcsCtrler": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.AxisAcsCtrler.Search": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.CMS": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.CMS.DsSearch": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.CMS.Failover": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.CMS.GetDsStatus": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.CMS.SlavedsList": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.CMS.SlavedsWizard": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Camera": {
"maxVersion": 9,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Camera.Event": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Camera.Export": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Camera.Group": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Camera.Import": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Camera.Intercom": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Camera.Search": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Camera.Status": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Camera.VolEval": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Camera.Wizard": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.CameraCap": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Device": {
"maxVersion": 2,
"minVersion": 1,
"path": "SurveillanceStation/device.cgi",
},
"SYNO.SurveillanceStation.DigitalOutput": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.DualAuth": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Emap": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Emap.Image": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Event": {
"maxVersion": 5,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Event.Export": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Event.Mount": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Event.Mount.Wizard": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.ExternalDevice.IFTTT": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.ExternalDevice.Storage.USB": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.ExternalDevice.Webhook": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.ExternalEvent": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.ExternalRecording": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Fisheye": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.GlobalSearch": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Help": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.HomeMode": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.HomeMode.Mobile": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IOModule": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IOModule.Search": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IPSpeaker": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IPSpeaker.Broadcast": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IPSpeaker.Group": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IPSpeaker.Search": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IVA": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IVA.Archive": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IVA.License": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IVA.Recording": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IVA.Report": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IVA.Simulator": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.IVA.TaskGroup": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Info": {
"maxVersion": 8,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.JoystickSetting": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Layout": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.License": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.LocalDisplay": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Log": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.MobileCam": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Notification": {
"maxVersion": 8,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Notification.Email": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Notification.Filter": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Notification.MobileSetting": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Notification.PushService": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Notification.SMS": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Notification.SMS.ServiceProvider": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Notification.Schedule": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.PTZ": {
"maxVersion": 5,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.PTZ.Patrol": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.PTZ.Preset": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.PersonalSettings.Image": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.PersonalSettings.Layout": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.PersonalSettings.Photo": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Player": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Player.LiveviewSrc": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Preload": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Recording": {
"maxVersion": 6,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Recording.Bookmark": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Recording.Export": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Recording.Mount": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Recording.Mount.Wizard": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Recording.Reindex": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Recording.ShareRecording": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.RecordingPicker": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Share": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.SnapShot": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Sort": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Stream": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Stream.VideoStreaming": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Streaming": {
"maxVersion": 2,
"minVersion": 1,
"path": "SurveillanceStation/streaming.cgi",
},
"SYNO.SurveillanceStation.System": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.TaskQueue": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.TimeLapse": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.TimeLapse.Recording": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Transactions.Device": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Transactions.Transaction": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.UserPrivilege": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.VideoStream": {
"maxVersion": 1,
"minVersion": 1,
"path": "SurveillanceStation/videoStreaming.cgi",
},
"SYNO.SurveillanceStation.VideoStreaming": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.VisualStation": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.VisualStation.Install": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.VisualStation.Layout": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.VisualStation.Search": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.Webhook": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SurveillanceStation.YoutubeLive": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.AdvanceSharing": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.AdvanceSharing.Public": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.AppIntegration": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Authentication": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Config": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Connection": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.DBUsage": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.DSM": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Export": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Files": {
"maxVersion": 3,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Info": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.KeyManagement": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Labels": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Log": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Metrics": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Metrics.Token": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Migration": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Node": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Node.Delete": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Node.Download": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Node.Restore": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Notifications": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Office": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Photos": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Privilege": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Profile": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Revisions": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.SCIM.Photo": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.SCIM.User": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Services.DocumentViewer": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Services.SynologyChat": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Services.VideoStation": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Settings": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Shard": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Share": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Share.Priv": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Sharing": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.String": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Tasks": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.TeamFolders": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Trash": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Users": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDrive.Webhooks": {
"maxVersion": 2,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDriveShareSync.Config": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDriveShareSync.Connection": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDriveShareSync.Session": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.SynologyDriveShareSync.Session.Set": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.Utils": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.VideoPlayer.Subtitle": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.VideoPlayer.SynologyDrive.Subtitle": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.WebDAV.CalDAV": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.WebDAV.CalDAV.Calendar": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
"SYNO.WebDAV.Common": {
"maxVersion": 1,
"minVersion": 1,
"path": "entry.cgi",
"requestFormat": "JSON",
},
},
"success": True,
}
| 30.352893
| 81
| 0.424762
| 10,419
| 146,908
| 5.988291
| 0.059123
| 0.140691
| 0.191852
| 0.314143
| 0.929703
| 0.927346
| 0.91212
| 0.909652
| 0.909652
| 0.907664
| 0
| 0.018946
| 0.411135
| 146,908
| 4,839
| 82
| 30.359165
| 0.702274
| 0.000347
| 0
| 0.658259
| 0
| 0
| 0.434876
| 0.150282
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.00124
| 0.000207
| 0
| 0.000207
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
861369b719399eb44ab3578f84bb10c85b8af8f1
| 63,828
|
py
|
Python
|
detection/maskrcnn_benchmark/modeling/roi_heads/iou_head/loss.py
|
fregu856/ebms_regression
|
95e5b513808d2ea1c2a0aea107727f6e66918f16
|
[
"MIT"
] | 74
|
2020-05-05T01:34:11.000Z
|
2022-03-15T06:47:38.000Z
|
detection/maskrcnn_benchmark/modeling/roi_heads/iou_head/loss.py
|
fregu856/ebms_regression
|
95e5b513808d2ea1c2a0aea107727f6e66918f16
|
[
"MIT"
] | 5
|
2020-08-05T10:07:27.000Z
|
2021-12-24T09:37:53.000Z
|
detection/maskrcnn_benchmark/modeling/roi_heads/iou_head/loss.py
|
fregu856/ebms_regression
|
95e5b513808d2ea1c2a0aea107727f6e66918f16
|
[
"MIT"
] | 14
|
2020-05-14T21:59:00.000Z
|
2022-01-29T16:08:00.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
import torch.nn as nn
from torch.nn import functional as F
from maskrcnn_benchmark.layers import smooth_l1_loss
from maskrcnn_benchmark.structures.boxlist_ops import boxlist_iou
from maskrcnn_benchmark.structures.bounding_box import BoxList
from maskrcnn_benchmark.modeling.utils import cat
import random
import math
def iou(reference, proposals):
    """Compute the IoU between a reference box and multiple proposal boxes.

    Boxes are in (x, y, w, h) format.

    args:
        reference - Tensor of shape (1, 4).
        proposals - Tensor of shape (num_proposals, 4)

    returns:
        torch.Tensor - Tensor of shape (num_proposals,) containing IoU of reference box with each proposal box.
    """
    # Corners of the intersection rectangle.
    top_left = torch.max(reference[:, :2], proposals[:, :2])
    bottom_right = torch.min(reference[:, :2] + reference[:, 2:],
                             proposals[:, :2] + proposals[:, 2:])
    inter_wh = (bottom_right - top_left).clamp(0)

    # Areas of intersection and union.
    inter_area = inter_wh.prod(dim=1)
    ref_area = reference[:, 2:].prod(dim=1)
    prop_area = proposals[:, 2:].prod(dim=1)
    return inter_area / (ref_area + prop_area - inter_area)
def rand_uniform(a, b, shape=1):
    """Sample numbers uniformly between a and b.

    args:
        a - lower bound
        b - upper bound
        shape - shape of the output tensor

    returns:
        torch.Tensor - tensor of shape=shape
    """
    # Affine transform of U[0, 1) samples onto [a, b).
    return a + (b - a) * torch.rand(shape)
def perturb_box(box, min_iou=0.5, sigma_factor=0.1):
    """ Perturb the input box by adding gaussian noise to the co-ordinates

    args:
        box - input box, in (x, y, w, h) format
        min_iou - minimum IoU overlap between input box and the perturbed box
        sigma_factor - amount of perturbation, relative to the box size. Can be either a single element, or a list of
                        sigma_factors, in which case one of them will be uniformly sampled. Further, each of the
                        sigma_factor element can be either a float, or a tensor
                        of shape (4,) specifying the sigma_factor per co-ordinate

    returns:
        torch.Tensor - the perturbed box
        torch.Tensor - IoU of the perturbed box with the input box (may be below
                       min_iou if no acceptable box was found within 100 tries)
    """
    if isinstance(sigma_factor, list):
        # If list, sample one sigma_factor as current sigma factor
        c_sigma_factor = random.choice(sigma_factor)
    else:
        c_sigma_factor = sigma_factor

    if not isinstance(c_sigma_factor, torch.Tensor):
        # Broadcast a scalar sigma to all four coordinates.
        c_sigma_factor = c_sigma_factor * torch.ones(4)

    # Noise std, proportional to the geometric mean of the box dimensions.
    perturb_factor = torch.sqrt(box[2]*box[3])*c_sigma_factor

    # multiple tries to ensure that the perturbed box has iou > min_iou with the input box
    for i_ in range(100):
        # Perturb the box center and size independently with Gaussian noise.
        c_x = box[0] + 0.5*box[2]
        c_y = box[1] + 0.5 * box[3]
        c_x_per = random.gauss(c_x, perturb_factor[0])
        c_y_per = random.gauss(c_y, perturb_factor[1])

        w_per = random.gauss(box[2], perturb_factor[2])
        h_per = random.gauss(box[3], perturb_factor[3])

        box_per = torch.Tensor([c_x_per - 0.5*w_per, c_y_per - 0.5*h_per, w_per, h_per])

        # Degenerate (non-positive) sizes are replaced by a random fraction of the original size.
        if box_per[2] <= 0:
            box_per[2] = box[2]*rand_uniform(0.15, 0.5)

        if box_per[3] <= 0:
            box_per[3] = box[3]*rand_uniform(0.15, 0.5)

        box_iou = iou(box.view(1, 4), box_per.view(1, 4))

        # if there is sufficient overlap, return
        if box_iou > min_iou:
            return box_per, box_iou

        # else reduce the perturb factor
        perturb_factor *= 0.9

    # Fell through all tries: return the last attempt even though the
    # min_iou constraint was not satisfied.
    return box_per, box_iou
def rect_to_rel(bb, sz_norm=None):
    """Convert boxes from (x, y, w, h) to the relative parameterization
    (cx / ref_w, cy / ref_h, log w, log h).

    The center is normalized by sz_norm when given, otherwise by the box's
    own size. The last dimension of bb holds the four box coordinates.
    """
    center = bb[..., :2] + 0.5 * bb[..., 2:]
    reference_sz = bb[..., 2:] if sz_norm is None else sz_norm
    center_rel = center / reference_sz
    log_sz = torch.log(bb[..., 2:])
    return torch.cat((center_rel, log_sz), dim=-1)
def rel_to_rect(bb, sz_norm=None):
    """Inverse of rect_to_rel: map (cx_rel, cy_rel, log w, log h) back to
    (x, y, w, h).

    The center is de-normalized by sz_norm when given, otherwise by the
    exponentiated size itself.
    """
    size = torch.exp(bb[..., 2:])
    reference_sz = size if sz_norm is None else sz_norm
    center = bb[..., :2] * reference_sz
    top_left = center - 0.5 * size
    return torch.cat((top_left, size), dim=-1)
def gauss_density_centered(x, std):
    """Density of a zero-mean 1D Gaussian with std `std`, evaluated at x
    (broadcasting elementwise)."""
    normalizer = math.sqrt(2 * math.pi) * std
    return torch.exp(-0.5 * (x / std) ** 2) / normalizer
def gmm_density_centered(x, std):
    """
    Evaluate the density of a zero-mean GMM with equally weighted, axis-aligned
    components at the points x.

    Assumes dim=-1 is the component dimension and dim=-2 is feature dimension. Rest are sample dimension.
    """
    if x.dim() == std.dim() - 1:
        # x lacks the component dimension; add it so it broadcasts against all components.
        x = x.unsqueeze(-1)
    elif not (x.dim() == std.dim() and x.shape[-1] == 1):
        raise ValueError('Last dimension must be the gmm stds.')
    # Product over independent feature dims, then mean over the equally-weighted components.
    return gauss_density_centered(x, std).prod(-2).mean(-1)
def sample_gmm_centered(std, num_samples=1):
    """Draw samples from a zero-mean GMM with equally weighted, axis-aligned
    components, and return their density values.

    args:
        std - component stds; the last dimension indexes the components.
        num_samples - number of samples to draw.

    returns:
        torch.Tensor - samples, shape (num_samples, num_dims)
        torch.Tensor - GMM density at each sample, shape (num_samples,)
    """
    num_components = std.shape[-1]
    num_dims = std.numel() // num_components

    std = std.view(1, num_dims, num_components)

    # Sample component ids (uniform over components)
    k = torch.randint(num_components, (num_samples,), dtype=torch.int64)
    std_samp = std[0,:,k].t()

    # Sample from the selected component's Gaussian
    x_centered = std_samp * torch.randn(num_samples, num_dims)
    prob_dens = gmm_density_centered(x_centered, std)

    return x_centered, prob_dens
def sample_gmm(mean, std, num_samples=1):
    """Draw samples from a GMM with a shared mean and equally weighted,
    axis-aligned components.

    args:
        mean - mean of the GMM; flattened to (1, num_dims).
        std - component stds; the last dimension indexes the components.
        num_samples - number of samples to draw.

    returns:
        torch.Tensor - samples, shape (num_samples, num_dims)
        torch.Tensor - density of the *centered* GMM at each (centered) sample,
                       shape (num_samples,)
    """
    num_dims = mean.numel()
    num_components = std.shape[-1]

    mean = mean.view(1,num_dims)
    std = std.view(1, -1, num_components)

    # Sample component ids (uniform over components)
    k = torch.randint(num_components, (num_samples,), dtype=torch.int64)
    std_samp = std[0,:,k].t()

    # Sample centered, then shift by the mean
    x_centered = std_samp * torch.randn(num_samples, num_dims)
    x = x_centered + mean
    prob_dens = gmm_density_centered(x_centered, std)

    return x, prob_dens
def sample_box_gmm(mean_box, proposal_sigma, gt_sigma=None, num_samples=1, add_mean_box=False):
    """Sample proposal boxes from a GMM centered (in the relative box
    parameterization) on mean_box.

    args:
        mean_box - box (x, y, w, h) around which proposals are sampled.
        proposal_sigma - iterable of (center_sigma, size_sigma) pairs, one GMM component each.
        gt_sigma - (center_sigma, size_sigma) of the ground-truth density;
                   None or (0, 0) yields an all-zero gt density.
        num_samples - number of boxes to sample.
        add_mean_box - if True, prepend mean_box itself, using the sentinel
                       values -1 (proposal density) and 1 (gt density).

    returns:
        torch.Tensor - proposal boxes (x, y, w, h)
        torch.Tensor - proposal density at each box
        torch.Tensor - gt density at each box
    """
    center_std = torch.Tensor([s[0] for s in proposal_sigma])
    sz_std = torch.Tensor([s[1] for s in proposal_sigma])
    # Per-coordinate stds: (cx, cy, w, h) x num_components.
    std = torch.stack([center_std, center_std, sz_std, sz_std])

    mean_box = mean_box.view(1,4)
    # Normalize centers by the mean box's size.
    sz_norm = mean_box[:,2:].clone()

    # Sample boxes in the centered relative parameterization
    proposals_rel_centered, proposal_density = sample_gmm_centered(std, num_samples)

    # Add mean and map back to (x, y, w, h)
    mean_box_rel = rect_to_rel(mean_box, sz_norm)
    proposals_rel = proposals_rel_centered + mean_box_rel
    proposals = rel_to_rect(proposals_rel, sz_norm)

    if gt_sigma is None or gt_sigma[0] == 0 and gt_sigma[1] == 0:
        gt_density = torch.zeros_like(proposal_density)
    else:
        std_gt = torch.Tensor([gt_sigma[0], gt_sigma[0], gt_sigma[1], gt_sigma[1]]).view(1,4)
        gt_density = gauss_density_centered(proposals_rel_centered, std_gt).prod(-1)

    if add_mean_box:
        proposals = torch.cat((mean_box, proposals))
        proposal_density = torch.cat((torch.Tensor([-1]), proposal_density))
        gt_density = torch.cat((torch.Tensor([1]), gt_density))

    return proposals, proposal_density, gt_density
def nce_sample_gmm_centered(std, num_samples=1):
    """Like sample_gmm_centered, but additionally returns the GMM density at
    zero (needed as the noise density of the gt box for the NCE loss).

    args:
        std - component stds; the last dimension indexes the components.
        num_samples - number of samples to draw.

    returns:
        torch.Tensor - samples, shape (num_samples, num_dims)
        torch.Tensor - density at each sample, shape (num_samples,)
        torch.Tensor - density at the origin, shape (num_samples,); all entries
                       are the same constant, which depends only on std.
    """
    num_components = std.shape[-1]
    num_dims = std.numel() // num_components

    std = std.view(1, num_dims, num_components)

    # Sample component ids (uniform over components)
    k = torch.randint(num_components, (num_samples,), dtype=torch.int64)
    std_samp = std[0,:,k].t()

    # Sample from the selected component's Gaussian
    x_centered = std_samp * torch.randn(num_samples, num_dims)
    prob_dens = gmm_density_centered(x_centered, std)

    # Density at zero offset, i.e. at the (relative) gt box itself.
    prob_dens_zero = gmm_density_centered(torch.zeros_like(x_centered), std)

    return x_centered, prob_dens, prob_dens_zero
def nce_sample_box_gmm(mean_box, proposal_sigma, gt_sigma=None, num_samples=1, add_mean_box=False):
    """Like sample_box_gmm, but additionally returns the proposal density at
    the mean box itself (the NCE noise density of the gt box).

    args:
        mean_box - box (x, y, w, h) around which proposals are sampled.
        proposal_sigma - iterable of (center_sigma, size_sigma) pairs, one GMM component each.
        gt_sigma - (center_sigma, size_sigma) of the gt density; None or (0, 0) gives zeros.
        num_samples - number of boxes to sample.
        add_mean_box - if True, prepend mean_box with sentinel densities -1 / 1.

    returns:
        (proposals, proposal_density, gt_density, proposal_density_zero)
    """
    center_std = torch.Tensor([s[0] for s in proposal_sigma])
    sz_std = torch.Tensor([s[1] for s in proposal_sigma])
    # Per-coordinate stds: (cx, cy, w, h) x num_components.
    std = torch.stack([center_std, center_std, sz_std, sz_std])

    mean_box = mean_box.view(1,4)
    sz_norm = mean_box[:,2:].clone()

    # Sample boxes
    proposals_rel_centered, proposal_density, proposal_density_zero = nce_sample_gmm_centered(std, num_samples)
    # (proposals_rel_centered has shape: (num_samples, 4))
    # (proposal_density has shape: (num_samples))
    # (proposal_density_zero has shape: (num_samples)) (all values are identical and constant) (this constant value only depends on std)

    # Add mean and map back
    mean_box_rel = rect_to_rel(mean_box, sz_norm)
    proposals_rel = proposals_rel_centered + mean_box_rel
    proposals = rel_to_rect(proposals_rel, sz_norm)

    if gt_sigma is None or gt_sigma[0] == 0 and gt_sigma[1] == 0:
        gt_density = torch.zeros_like(proposal_density)
    else:
        std_gt = torch.Tensor([gt_sigma[0], gt_sigma[0], gt_sigma[1], gt_sigma[1]]).view(1,4)
        gt_density = gauss_density_centered(proposals_rel_centered, std_gt).prod(-1)

    if add_mean_box:
        proposals = torch.cat((mean_box, proposals))
        proposal_density = torch.cat((torch.Tensor([-1]), proposal_density))
        gt_density = torch.cat((torch.Tensor([1]), gt_density))

    return proposals, proposal_density, gt_density, proposal_density_zero
def nce_sample_gmm_centered2(std, std2, num_samples=1):
    """Draw samples using the (widened) stds std2, but evaluate their density
    under the GMM with stds std.

    args:
        std - component stds used for *density evaluation*.
        std2 - component stds used for *sampling*.
        num_samples - number of samples to draw.

    returns:
        torch.Tensor - samples, shape (num_samples, num_dims)
        torch.Tensor - density (under std) at each sample, shape (num_samples,)
    """
    num_components = std.shape[-1]
    num_dims = std.numel() // num_components

    std = std.view(1, num_dims, num_components)
    std2 = std2.view(1, num_dims, num_components)

    # Sample component ids (uniform over components)
    k = torch.randint(num_components, (num_samples,), dtype=torch.int64)
    std_samp2 = std2[0,:,k].t()

    # Sample with the widened stds
    x_centered2 = std_samp2 * torch.randn(num_samples, num_dims)
    prob_dens2 = gmm_density_centered(x_centered2, std) # (note that it actually should be std here!)

    return x_centered2, prob_dens2
def nce_sample_box_gmm2(mean_box, proposal_sigma, beta, gt_sigma=None, num_samples=1, add_mean_box=False):
    """Sample boxes from a widened GMM (stds scaled by beta) centered on
    mean_box, while evaluating their density under the *un-scaled* GMM.

    args:
        mean_box - box (x, y, w, h) around which proposals are sampled.
        proposal_sigma - iterable of (center_sigma, size_sigma) pairs, one GMM component each.
        beta - scale factor applied to the stds used for sampling only.
        gt_sigma - (center_sigma, size_sigma) of the gt density; None or (0, 0) gives zeros.
        num_samples - number of boxes to sample.
        add_mean_box - if True, prepend mean_box with sentinel densities -1 / 1.

    returns:
        (proposals, proposal_density, gt_density)
    """
    center_std = torch.Tensor([s[0] for s in proposal_sigma])
    sz_std = torch.Tensor([s[1] for s in proposal_sigma])
    # Per-coordinate stds: (cx, cy, w, h) x num_components.
    std = torch.stack([center_std, center_std, sz_std, sz_std])
    # Widened stds, used only for drawing the samples.
    std2 = beta * torch.stack([center_std, center_std, sz_std, sz_std])

    mean_box = mean_box.view(1, 4)
    sz_norm = mean_box[:, 2:].clone()

    # Sample boxes (drawn under std2, density evaluated under std)
    proposals_rel_centered2, proposal_density2 = nce_sample_gmm_centered2(std, std2, num_samples)
    # (proposals_rel_centered2 has shape: (num_samples, 4))
    # (proposal_density2 has shape: (num_samples))

    # Add mean and map back
    mean_box_rel = rect_to_rel(mean_box, sz_norm)
    proposals_rel = proposals_rel_centered2 + mean_box_rel
    proposals = rel_to_rect(proposals_rel, sz_norm)

    if gt_sigma is None or (gt_sigma[0] == 0 and gt_sigma[1] == 0):
        gt_density = torch.zeros_like(proposal_density2)
    else:
        std_gt = torch.Tensor([gt_sigma[0], gt_sigma[0], gt_sigma[1], gt_sigma[1]]).view(1, 4)
        # BUG FIX: previously referenced the undefined name `proposals_rel_centered`,
        # raising NameError whenever a non-zero gt_sigma was passed.
        gt_density = gauss_density_centered(proposals_rel_centered2, std_gt).prod(-1)

    if add_mean_box:
        proposals = torch.cat((mean_box, proposals))
        # BUG FIX: the sentinel entry was previously concatenated onto a separate
        # local (`proposal_density`) that was never returned, so add_mean_box had
        # no effect on the returned proposal density.
        proposal_density2 = torch.cat((torch.Tensor([-1]), proposal_density2))
        gt_density = torch.cat((torch.Tensor([1]), gt_density))

    return proposals, proposal_density2, gt_density
def kl_regression_loss(scores, sample_density, gt_density, mc_dim=0, eps=0.0, size_average=True):
    """KL-divergence regression loss, estimated with importance sampling.

    mc_dim is the dimension holding the Monte-Carlo samples. eps regularizes
    the division by the sample density.
    """
    inv_density = 1.0 / (sample_density + eps)
    # Importance-sampled estimate of the log partition function.
    log_partition = torch.log(torch.mean(torch.exp(scores) * inv_density, dim=mc_dim))
    # Cross term: expectation of the score under the gt density.
    cross_term = torch.mean(scores * (gt_density * inv_density), dim=mc_dim)
    per_sample_loss = log_partition - cross_term
    return per_sample_loss.mean() if size_average else per_sample_loss
def ml_regression_loss(scores, sample_density, gt_density, mc_dim=0, eps=0.0, exp_max=None, size_average=True):
    """Maximum-likelihood regression loss with an importance-sampled partition
    function: L = log mean_i[exp(score_i)/q_i] - score(gt).

    mc_dim is dimension of MC samples. Index 0 along mc_dim must hold the
    gt entry, marked by sample_density == -1 and gt_density == 1; the
    remaining entries are the MC samples.

    exp_max, when given, caps exp's argument by shifting it down (compensated
    after the log) to avoid overflow.
    """
    assert mc_dim == 1
    # Sentinel checks: the first entry along mc_dim is the gt box.
    assert (sample_density[:,0,...] == -1).all()
    assert (gt_density[:,0,...] == 1).all()

    # Importance weights in log space: score - log q (gt entry excluded).
    exp_val = scores[:, 1:, ...] - torch.log(sample_density[:, 1:, ...] + eps)

    if exp_max is None:
        bias = 0
        bias_squeeze = 0
    else:
        # Shift so that the largest exponent is at most exp_max; the shift is
        # added back (bias_squeeze) after the log, leaving the value unchanged
        # up to floating-point error.
        bias = (torch.max(exp_val.detach(), dim=mc_dim, keepdim=True)[0] - exp_max).clamp(min=0)
        bias_squeeze = bias.squeeze(dim=mc_dim)

    L = torch.log(torch.mean(torch.exp(exp_val - bias), dim=mc_dim)) + bias_squeeze - scores[:, 0, ...]

    if size_average:
        loss = L.mean()
    else:
        loss = L

    return loss
def ml_regression_loss_logsumexp(scores, sample_density, gt_density, mc_dim=0, eps=0.0, exp_max=None, size_average=True):
    """Maximum-likelihood regression loss, computed with torch.logsumexp for
    numerical stability: L = log_Z_hat - score(gt), where
    log_Z_hat = logsumexp_i(score_i - log q_i) - log(M-1).

    args:
        scores - (num_bboxes_in_batch, M); index 0 along dim 1 is the gt entry.
        sample_density - (num_bboxes_in_batch, M); proposal density q, with the
                         sentinel -1 at index 0.
        gt_density - (num_bboxes_in_batch, M); sentinel 1 at index 0 (otherwise
                     unused here).
        mc_dim - dimension of MC samples; must be 1.
        eps - regularizer added to the sample density before the log.
        exp_max - accepted for interface compatibility with ml_regression_loss;
                  not needed (and ignored) since logsumexp is already stable.
        size_average - if True, return the mean over boxes; else the per-box loss.

    returns:
        torch.Tensor - scalar loss, or per-box losses of shape (num_bboxes_in_batch,)
    """
    assert mc_dim == 1
    # Sentinel checks: the first entry along dim 1 is the gt box.
    assert (sample_density[:, 0, ...] == -1).all()
    assert (gt_density[:, 0, ...] == 1).all()

    scores_samples = scores[:, 1:]       # (num_bboxes_in_batch, M-1)
    q_y_samples = sample_density[:, 1:]  # (num_bboxes_in_batch, M-1)
    scores_gt = scores[:, 0]             # (num_bboxes_in_batch,)

    num_samples = scores_samples.size(1)  # M-1

    # Importance-sampled log partition function; eps guards against log(0)
    # (no-op with the default eps=0.0, matching previous behavior).
    log_Z = torch.logsumexp(scores_samples - torch.log(q_y_samples + eps), dim=1) - math.log(num_samples)

    L = log_Z - scores_gt  # (num_bboxes_in_batch,)

    return L.mean() if size_average else L
def kl_regression_loss_logsumexp(scores, sample_density, gt_density, mc_dim=0, eps=0.0, exp_max=None, size_average=True):
    """KL-divergence regression loss with a logsumexp-stabilized partition
    estimate.

    scores / sample_density / gt_density all have shape
    (num_bboxes_in_batch, M). Note: the reduction runs along dim 1 regardless
    of mc_dim, matching how this function is called in this file; exp_max is
    accepted for interface compatibility only.
    """
    num_samples = scores.shape[1]
    regularized_density = sample_density + eps
    # log of the importance weights exp(score)/q.
    log_is_weights = scores - torch.log(regularized_density)
    log_partition = torch.logsumexp(log_is_weights, dim=1) - math.log(num_samples)
    cross_term = torch.mean(scores * (gt_density / regularized_density), dim=1)
    per_box_loss = log_partition - cross_term
    return per_box_loss.mean() if size_average else per_box_loss
class IoUNetLossComputation(object):
    """
    Computes the training loss for an IoU-prediction head: jittered proposal
    boxes are sampled around each ground-truth box and the head regresses
    their (rescaled) IoU with the ground truth via a smooth-L1 loss.

    Protocol: call sample_jittered_boxes(gt_boxes) first (caches targets on
    self), then call the instance with the predicted scores.
    """

    def __init__(
        self,
        num_proposal=16,
        proposal_min_overlap=0.5,
        cls_agnostic_iou_pred=False,
        num_pre_generated_boxes=50000,
        sampling_type="default_iou",
        proposal_sigma=None,
        gt_sigma=None
    ):
        # num_proposal: number of jittered boxes sampled per gt box.
        self.num_proposal = num_proposal
        # proposal_min_overlap: minimum IoU a jittered box must have with its gt box.
        self.proposal_min_overlap = proposal_min_overlap
        # cls_agnostic_iou_pred: if True, a single score column is used for all classes.
        self.cls_agnostic_iou_pred = cls_agnostic_iou_pred
        self.num_pre_generated_boxes = num_pre_generated_boxes
        # sampling_type: "default_iou" (pre-generated jitter pool) or "default_ml" (GMM sampling).
        self.sampling_type = sampling_type

        if sampling_type == "default_iou":
            # Pre-generate a pool of jittered unit boxes (and their IoUs) once;
            # training then samples from this pool.
            self.jittered_boxes, self.jittered_box_ious = self.generate_jittered_boxes()
            self.sample_probability = self.get_sample_probability()

        # Used only by the "default_ml" sampling type.
        self.proposal_sigma = proposal_sigma
        self.gt_sigma = gt_sigma

    def generate_jittered_boxes(self):
        """Jitter a canonical unit box num_pre_generated_boxes times.

        returns:
            torch.Tensor - jittered boxes, stacked along dim 0
            torch.Tensor - their IoUs with the base box, stacked along dim 0
        """
        jittered_boxes = []
        jittered_box_ious = []
        # Unit box centered at the origin; jitters are rescaled per gt box later.
        base_box = torch.tensor([-0.5, -0.5, 1.0, 1.0])
        for i in range(self.num_pre_generated_boxes):
            box_per, box_iou = perturb_box(base_box.clone(), min_iou=self.proposal_min_overlap,
                                           sigma_factor=[0.004, 0.01, 0.05, 0.1, 0.2, 0.3])
            jittered_boxes.append(box_per)
            jittered_box_ious.append(box_iou)

        # TODO use bboxlist
        return torch.stack(jittered_boxes, dim=0), torch.stack(jittered_box_ious, dim=0)

    def get_sample_probability(self):
        """Compute per-box sampling weights that flatten the IoU distribution
        of the pre-generated pool: boxes in rare IoU bins get larger weight."""
        num_bins = 100
        iou_hist = torch.histc(self.jittered_box_ious, bins=num_bins, min=self.proposal_min_overlap, max=1.0)
        # +1 avoids division by zero for empty bins.
        weight = 1 / (iou_hist + 1)

        # Map each box's IoU to its histogram bin index.
        idx = (self.jittered_box_ious - self.proposal_min_overlap) / (1.0 - self.proposal_min_overlap)
        idx = (idx * (num_bins - 1)).long()

        sample_probability = weight[idx]
        return sample_probability.view(-1)

    def sample_jitter(self, num_samples):
        """Draw num_samples boxes (and their IoUs) from the pre-generated pool,
        weighted so that IoU values are roughly uniformly represented."""
        sampled_ids = torch.multinomial(self.sample_probability, num_samples, replacement=True)
        return self.jittered_boxes[sampled_ids, :], self.jittered_box_ious[sampled_ids]

    def sample_jittered_boxes(self, gt_boxes):
        """
        Sample jittered proposal boxes around each gt box and cache the targets
        (self._gt_iou, self._proposals) used later by __call__.

        :param gt_boxes: BoxList containining the gt boxes for each image
        :return: list of BoxList with num_proposal jittered boxes per gt box
        """
        orig_mode_list = [b.mode for b in gt_boxes]
        gt_boxes = [b.convert('xywh') for b in gt_boxes]

        gt_iou_list = []
        out_boxes = []
        if self.sampling_type == "default_iou":
            for gt_b in gt_boxes:
                b = gt_b.bbox.view(-1, 4)
                labels = gt_b.get_field("labels")
                jittered_base_boxes, gt_iou = self.sample_jitter(b.shape[0] * self.num_proposal)
                jittered_base_boxes = jittered_base_boxes.to(b.device)
                gt_iou = gt_iou.to(b.device)

                # Rescale the unit-box jitters by each gt box's size ...
                jittered_base_boxes_scaled = jittered_base_boxes.view(b.shape[0], self.num_proposal, 4) * b[:, 2:].repeat(1, 2).view(-1, 1, 4)
                # ... and translate them to each gt box's center.
                b_center = b[:, :2] + 0.5 * b[:, 2:]
                jittered_base_boxes_scaled[..., :2] += b_center.view(-1, 1, 2)

                # Each proposal inherits the label of its gt box.
                labels = labels.view(-1, 1).repeat(1, self.num_proposal)

                new_box = BoxList(jittered_base_boxes_scaled.view(-1, 4), image_size=gt_b.size, mode='xywh')
                new_box.add_field("labels", labels.view(-1))
                out_boxes.append(new_box)
                gt_iou_list.append(gt_iou)
        elif self.sampling_type == "default_ml":
            # Sample proposals from a GMM around each gt box instead of the pool.
            for gt_b in gt_boxes:
                device = gt_b.bbox.device
                b = gt_b.bbox.view(-1, 4).cpu()
                labels = gt_b.get_field("labels")
                out_boxes_list = []
                for i in range(b.shape[0]):
                    # num_proposal-1 samples plus the gt box itself (add_mean_box=True).
                    proposals, _, _ = sample_box_gmm(b[i, :], self.proposal_sigma, self.gt_sigma, self.num_proposal - 1,
                                                     True)
                    gt_iou = iou(b[i:i+1, :], proposals)
                    proposals = proposals.to(device)
                    gt_iou = gt_iou.to(device)
                    gt_iou = gt_iou.view(-1, 1)
                    out_boxes_list.append(proposals)
                    gt_iou_list.append(gt_iou)
                out_boxes_t = torch.cat(out_boxes_list, dim=0)
                new_box = BoxList(out_boxes_t, image_size=gt_b.size, mode='xywh')
                labels = labels.view(-1, 1).repeat(1, self.num_proposal)
                new_box.add_field("labels", labels.view(-1))
                out_boxes.append(new_box)
        else:
            raise ValueError

        out_boxes = [b.convert(m) for b, m in zip(out_boxes, orig_mode_list)]
        gt_boxes = [b.convert('xyxy') for b in gt_boxes]

        # TODO is this safe?
        # Regression target: IoU rescaled from [0, 1] to [-1, 1].
        self._gt_iou = 2 * torch.cat(gt_iou_list, dim=0) - 1
        self._proposals = out_boxes
        # TODO check output distribution
        return out_boxes

    def __call__(self, iou_score):
        """
        Computes the IoU-regression (smooth-L1) loss.
        This requires that sample_jittered_boxes has been called beforehand.

        Arguments:
            iou_score (Tensor): predicted IoU scores; one column per class
                (or a single column if cls_agnostic_iou_pred).

        Returns:
            iou_loss (Tensor): scalar loss, normalized by the number of proposals.
        """
        device = iou_score.device

        # TODO handle multi-class stuff
        gt_iou = self._gt_iou
        labels = cat([proposal.get_field("labels") for proposal in self._proposals], dim=0).long()
        sampled_pos_inds_subset = torch.nonzero(labels > 0).squeeze(1)
        labels_pos = labels[sampled_pos_inds_subset]
        if self.cls_agnostic_iou_pred:
            map_inds = torch.tensor([0], device=device)
        else:
            # Pick each proposal's score column by its class label.
            map_inds = labels_pos[:, None]

        iou_loss = smooth_l1_loss(
            iou_score[sampled_pos_inds_subset[:, None], map_inds],
            gt_iou[sampled_pos_inds_subset],
            size_average=False,
            beta=1,
        )
        iou_loss = iou_loss / labels.numel()

        return iou_loss
class KLIoUNetLossComputation(object):
    """
    Trains the confidence head with the importance-sampled KL-divergence loss
    (kl_regression_loss): proposals are drawn from a GMM around each gt box
    and the predicted (unnormalized) log-density is fit to the gt density.

    Protocol: call sample_jittered_boxes(gt_boxes) first (caches densities on
    self), then call the instance with the predicted scores.
    """

    def __init__(
        self,
        num_proposal=128,
        gt_sigma=(0.125, 0.125),
        proposal_sigma=((0.125, 0.125), (0.25, 0.25), (0.5, 0.5), (1.0, 1.0)),
        cls_agnostic_iou_pred=False,
    ):
        # num_proposal: number of proposals (M) sampled per gt box.
        self.num_proposal = num_proposal
        # gt_sigma: (center, size) std of the target density.
        self.gt_sigma = gt_sigma
        # proposal_sigma: (center, size) stds, one per GMM component.
        self.proposal_sigma = proposal_sigma
        # cls_agnostic_iou_pred: if True, a single score column is used for all classes.
        self.cls_agnostic_iou_pred = cls_agnostic_iou_pred

    def sample_jittered_boxes(self, gt_boxes):
        """
        Sample proposal boxes around each gt box and cache the proposal / gt
        densities (self._proposal_density, self._gt_density) used by __call__.

        :param gt_boxes: BoxList containining the gt boxes for each image
        :return: list of BoxList with num_proposal proposals per gt box
        """
        orig_mode_list = [b.mode for b in gt_boxes]
        gt_boxes = [b.convert('xywh') for b in gt_boxes]

        proposal_density_list = []
        gt_density_list = []
        jittered_boxes = []
        for gt_b in gt_boxes:
            device = gt_b.bbox.device
            # GMM sampling is done on CPU; results are moved back afterwards.
            b = gt_b.bbox.view(-1, 4).cpu()
            labels = gt_b.get_field("labels")
            out_boxes_list = []
            for i in range(b.shape[0]):
                proposals, proposal_density, gt_density = sample_box_gmm(b[i, :], self.proposal_sigma, self.gt_sigma,
                                                                         self.num_proposal, False)
                proposals = proposals.to(device)
                proposal_density = proposal_density.to(device)
                gt_density = gt_density.to(device)
                out_boxes_list.append(proposals)
                proposal_density_list.append(proposal_density)
                gt_density_list.append(gt_density)
            out_boxes_t = torch.cat(out_boxes_list, dim=0)
            new_box = BoxList(out_boxes_t, image_size=gt_b.size, mode='xywh')
            # Each proposal inherits the label of its gt box.
            labels = labels.view(-1, 1).repeat(1, self.num_proposal)
            new_box.add_field("labels", labels.view(-1))
            jittered_boxes.append(new_box)

        jittered_boxes = [b.convert(m) for b, m in zip(jittered_boxes, orig_mode_list)]

        # TODO is this safe?
        self._proposal_density = torch.cat(proposal_density_list, dim=0)
        self._gt_density = torch.cat(gt_density_list, dim=0)
        self._proposals = jittered_boxes
        # TODO check output distribution
        return jittered_boxes

    def __call__(self, score):
        """
        Computes the KL-divergence loss.
        This requires that sample_jittered_boxes has been called beforehand.

        Arguments:
            score (Tensor): predicted scores; one column per class
                (or a single column if cls_agnostic_iou_pred).

        Returns:
            iou_loss (Tensor): scalar loss, averaged over gt boxes.
        """
        device = score.device

        # TODO handle multi-class stuff
        gt_density = self._gt_density
        proposal_density = self._proposal_density
        labels = cat([proposal.get_field("labels") for proposal in self._proposals], dim=0).long()
        sampled_pos_inds_subset = torch.nonzero(labels > 0).squeeze(1)
        labels_pos = labels[sampled_pos_inds_subset]
        if self.cls_agnostic_iou_pred:
            map_inds = torch.tensor([0], device=device)
        else:
            # Pick each proposal's score column by its class label.
            map_inds = labels_pos[:, None]

        iou_loss = kl_regression_loss(
            score[sampled_pos_inds_subset[:, None], map_inds].view(-1, self.num_proposal),
            proposal_density[sampled_pos_inds_subset].view(-1, self.num_proposal),
            gt_density[sampled_pos_inds_subset].view(-1, self.num_proposal),
            size_average=False,
            mc_dim=1)
        # Sum over gt boxes divided by the number of gt boxes, i.e. a mean.
        iou_loss = iou_loss.sum() / (labels.numel() / self.num_proposal)

        return iou_loss
class MLIoUNetLossComputation(object):
    """
    Trains the confidence head with the maximum-likelihood loss
    (ml_regression_loss): the gt box plus num_proposal-1 GMM-sampled proposals
    are scored, and -log p(gt) is minimized with an importance-sampled
    partition function.

    Protocol: call sample_jittered_boxes(gt_boxes) first (caches densities on
    self), then call the instance with the predicted scores.
    """

    def __init__(
        self,
        num_proposal=128,
        gt_sigma=(0.125, 0.125),
        proposal_sigma=((0.125, 0.125), (0.25, 0.25), (0.5, 0.5), (1.0, 1.0)),
        cls_agnostic_iou_pred=False,
        exp_clamp=None
    ):
        # num_proposal: total entries per gt box (the gt box + M-1 samples).
        self.num_proposal = num_proposal
        # gt_sigma: (center, size) std of the target density.
        self.gt_sigma = gt_sigma
        # proposal_sigma: (center, size) stds, one per GMM component.
        self.proposal_sigma = proposal_sigma
        self.cls_agnostic_iou_pred = cls_agnostic_iou_pred
        # exp_clamp: stored but not read in this class — TODO confirm intended use.
        self.exp_clamp = exp_clamp

    def sample_jittered_boxes(self, gt_boxes):
        """
        Sample proposal boxes around each gt box (prepending the gt box itself
        with sentinel densities) and cache the proposal / gt densities used by
        __call__.

        :param gt_boxes: BoxList containining the gt boxes for each image
        :return: list of BoxList with num_proposal entries per gt box
        """
        orig_mode_list = [b.mode for b in gt_boxes]
        gt_boxes = [b.convert('xywh') for b in gt_boxes]

        proposal_density_list = []
        gt_density_list = []
        jittered_boxes = []
        for gt_b in gt_boxes:
            device = gt_b.bbox.device
            # GMM sampling is done on CPU; results are moved back afterwards.
            b = gt_b.bbox.view(-1, 4).cpu()
            labels = gt_b.get_field("labels")
            out_boxes_list = []
            for i in range(b.shape[0]):
                # num_proposal-1 samples; add_mean_box=True prepends the gt box
                # with sentinel densities (-1 proposal, 1 gt).
                proposals, proposal_density, gt_density = sample_box_gmm(b[i, :], self.proposal_sigma, self.gt_sigma,
                                                                         self.num_proposal-1, True)
                proposals = proposals.to(device)
                proposal_density = proposal_density.to(device)
                gt_density = gt_density.to(device)
                out_boxes_list.append(proposals)
                proposal_density_list.append(proposal_density)
                gt_density_list.append(gt_density)
            out_boxes_t = torch.cat(out_boxes_list, dim=0)
            new_box = BoxList(out_boxes_t, image_size=gt_b.size, mode='xywh')
            # Each proposal inherits the label of its gt box.
            labels = labels.view(-1, 1).repeat(1, self.num_proposal)
            new_box.add_field("labels", labels.view(-1))
            jittered_boxes.append(new_box)

        jittered_boxes = [b.convert(m) for b, m in zip(jittered_boxes, orig_mode_list)]

        # TODO is this safe?
        self._proposal_density = torch.cat(proposal_density_list, dim=0)
        self._gt_density = torch.cat(gt_density_list, dim=0)
        self._proposals = jittered_boxes
        # TODO check output distribution
        return jittered_boxes

    def __call__(self, score):
        """
        Computes the maximum-likelihood loss.
        This requires that sample_jittered_boxes has been called beforehand.

        Arguments:
            score (Tensor): predicted scores; one column per class
                (or a single column if cls_agnostic_iou_pred).

        Returns:
            iou_loss (Tensor): scalar loss, averaged over gt boxes.
        """
        device = score.device

        # TODO handle multi-class stuff
        gt_density = self._gt_density
        proposal_density = self._proposal_density
        labels = cat([proposal.get_field("labels") for proposal in self._proposals], dim=0).long()
        sampled_pos_inds_subset = torch.nonzero(labels > 0).squeeze(1)
        labels_pos = labels[sampled_pos_inds_subset]
        if self.cls_agnostic_iou_pred:
            map_inds = torch.tensor([0], device=device)
        else:
            # Pick each proposal's score column by its class label.
            map_inds = labels_pos[:, None]

        iou_loss = ml_regression_loss(
            score[sampled_pos_inds_subset[:, None], map_inds].view(-1, self.num_proposal),
            proposal_density[sampled_pos_inds_subset].view(-1, self.num_proposal),
            gt_density[sampled_pos_inds_subset].view(-1, self.num_proposal),
            exp_max=10,
            size_average=False,
            mc_dim=1)
        # Sum over gt boxes divided by the number of gt boxes, i.e. a mean.
        iou_loss = iou_loss.sum() / (labels.numel() / self.num_proposal)

        return iou_loss
class LossComputation_mlis(object):
    """
    Maximum-likelihood loss with importance sampling ("ML-IS"), computed with
    the logsumexp-stabilized ml_regression_loss_logsumexp. Sampling is
    identical to MLIoUNetLossComputation.

    Protocol: call sample_jittered_boxes(gt_boxes) first (caches densities on
    self), then call the instance with the predicted scores.
    """

    def __init__(
        self,
        num_proposal=128,
        gt_sigma=(0.125, 0.125),
        proposal_sigma=((0.125, 0.125), (0.25, 0.25), (0.5, 0.5), (1.0, 1.0)),
        cls_agnostic_iou_pred=False,
        exp_clamp=None
    ):
        # num_proposal: total entries per gt box (the gt box + M-1 samples).
        self.num_proposal = num_proposal
        # gt_sigma: (center, size) std of the target density.
        self.gt_sigma = gt_sigma
        # proposal_sigma: (center, size) stds, one per GMM component.
        self.proposal_sigma = proposal_sigma
        self.cls_agnostic_iou_pred = cls_agnostic_iou_pred
        # exp_clamp: stored but not read in this class — TODO confirm intended use.
        self.exp_clamp = exp_clamp

    def sample_jittered_boxes(self, gt_boxes):
        """
        Sample proposal boxes around each gt box (prepending the gt box itself
        with sentinel densities) and cache the proposal / gt densities used by
        __call__.

        :param gt_boxes: BoxList containining the gt boxes for each image
        :return: list of BoxList with num_proposal entries per gt box
        """
        orig_mode_list = [b.mode for b in gt_boxes]
        gt_boxes = [b.convert('xywh') for b in gt_boxes]

        proposal_density_list = []
        gt_density_list = []
        jittered_boxes = []
        for gt_b in gt_boxes:
            device = gt_b.bbox.device
            # GMM sampling is done on CPU; results are moved back afterwards.
            b = gt_b.bbox.view(-1, 4).cpu()
            labels = gt_b.get_field("labels")
            out_boxes_list = []
            for i in range(b.shape[0]):
                # num_proposal-1 samples; add_mean_box=True prepends the gt box
                # with sentinel densities (-1 proposal, 1 gt).
                proposals, proposal_density, gt_density = sample_box_gmm(b[i, :], self.proposal_sigma, self.gt_sigma,
                                                                         self.num_proposal-1, True)
                proposals = proposals.to(device)
                proposal_density = proposal_density.to(device)
                gt_density = gt_density.to(device)
                out_boxes_list.append(proposals)
                proposal_density_list.append(proposal_density)
                gt_density_list.append(gt_density)
            out_boxes_t = torch.cat(out_boxes_list, dim=0)
            new_box = BoxList(out_boxes_t, image_size=gt_b.size, mode='xywh')
            # Each proposal inherits the label of its gt box.
            labels = labels.view(-1, 1).repeat(1, self.num_proposal)
            new_box.add_field("labels", labels.view(-1))
            jittered_boxes.append(new_box)

        jittered_boxes = [b.convert(m) for b, m in zip(jittered_boxes, orig_mode_list)]

        # TODO is this safe?
        self._proposal_density = torch.cat(proposal_density_list, dim=0)
        self._gt_density = torch.cat(gt_density_list, dim=0)
        self._proposals = jittered_boxes
        # TODO check output distribution
        return jittered_boxes

    def __call__(self, score):
        """
        Computes the ML-IS loss.
        This requires that sample_jittered_boxes has been called beforehand.

        Arguments:
            score (Tensor): predicted scores; one column per class.

        Returns:
            iou_loss (Tensor): scalar loss, averaged over gt boxes.
        """
        # (score has shape: (num_bboxes, 81)) (num_bboxes can be different for every batch, e.g. 13952, 20608, ...)
        device = score.device

        # TODO handle multi-class stuff
        gt_density = self._gt_density # (shape: (num_bboxes))
        proposal_density = self._proposal_density # (shape: (num_bboxes))
        labels = cat([proposal.get_field("labels") for proposal in self._proposals], dim=0).long()
        sampled_pos_inds_subset = torch.nonzero(labels > 0).squeeze(1)
        labels_pos = labels[sampled_pos_inds_subset]
        if self.cls_agnostic_iou_pred:
            map_inds = torch.tensor([0], device=device)
        else:
            # Pick each proposal's score column by its class label.
            map_inds = labels_pos[:, None]

        # (score[sampled_pos_inds_subset[:, None], map_inds].view(-1, self.num_proposal) has shape: (num_gt_bboxes_in_batch, M)) (M == 128)
        # (num_gt_bboxes_in_batch == num_bboxes/M)
        # (proposal_density[...].view(-1, self.num_proposal) and gt_density[...].view(-1, self.num_proposal) have shape: (num_gt_bboxes_in_batch, M))
        iou_loss = ml_regression_loss_logsumexp(
            score[sampled_pos_inds_subset[:, None], map_inds].view(-1, self.num_proposal),
            proposal_density[sampled_pos_inds_subset].view(-1, self.num_proposal),
            gt_density[sampled_pos_inds_subset].view(-1, self.num_proposal),
            exp_max=10,
            size_average=False,
            mc_dim=1)
        # (iou_loss has shape: (num_gt_bboxes_in_batch))
        # Sum over gt boxes divided by the number of gt boxes, i.e. a mean.
        iou_loss = iou_loss.sum() / (labels.numel() / self.num_proposal)

        return iou_loss
class LossComputation_kldis(object):
    """
    KL-divergence loss with importance sampling ("KL-D-IS"), computed with the
    logsumexp-stabilized kl_regression_loss_logsumexp. Sampling is identical
    to KLIoUNetLossComputation (no gt box prepended).

    Protocol: call sample_jittered_boxes(gt_boxes) first (caches densities on
    self), then call the instance with the predicted scores.
    """

    def __init__(
        self,
        num_proposal=128,
        gt_sigma=(0.125, 0.125),
        proposal_sigma=((0.125, 0.125), (0.25, 0.25), (0.5, 0.5), (1.0, 1.0)),
        cls_agnostic_iou_pred=False,
        exp_clamp=None
    ):
        # num_proposal: number of proposals (M) sampled per gt box.
        self.num_proposal = num_proposal
        # gt_sigma: (center, size) std of the target density.
        self.gt_sigma = gt_sigma
        # proposal_sigma: (center, size) stds, one per GMM component.
        self.proposal_sigma = proposal_sigma
        self.cls_agnostic_iou_pred = cls_agnostic_iou_pred
        # exp_clamp: stored but not read in this class — TODO confirm intended use.
        self.exp_clamp = exp_clamp

    def sample_jittered_boxes(self, gt_boxes):
        """
        Sample proposal boxes around each gt box and cache the proposal / gt
        densities (self._proposal_density, self._gt_density) used by __call__.

        :param gt_boxes: BoxList containining the gt boxes for each image
        :return: list of BoxList with num_proposal proposals per gt box
        """
        orig_mode_list = [b.mode for b in gt_boxes]
        gt_boxes = [b.convert('xywh') for b in gt_boxes]

        proposal_density_list = []
        gt_density_list = []
        jittered_boxes = []
        for gt_b in gt_boxes:
            device = gt_b.bbox.device
            # GMM sampling is done on CPU; results are moved back afterwards.
            b = gt_b.bbox.view(-1, 4).cpu()
            labels = gt_b.get_field("labels")
            out_boxes_list = []
            for i in range(b.shape[0]):
                proposals, proposal_density, gt_density = sample_box_gmm(b[i, :], self.proposal_sigma, self.gt_sigma,
                                                                         self.num_proposal, False)
                proposals = proposals.to(device)
                proposal_density = proposal_density.to(device)
                gt_density = gt_density.to(device)
                out_boxes_list.append(proposals)
                proposal_density_list.append(proposal_density)
                gt_density_list.append(gt_density)
            out_boxes_t = torch.cat(out_boxes_list, dim=0)
            new_box = BoxList(out_boxes_t, image_size=gt_b.size, mode='xywh')
            # Each proposal inherits the label of its gt box.
            labels = labels.view(-1, 1).repeat(1, self.num_proposal)
            new_box.add_field("labels", labels.view(-1))
            jittered_boxes.append(new_box)

        jittered_boxes = [b.convert(m) for b, m in zip(jittered_boxes, orig_mode_list)]

        # TODO is this safe?
        self._proposal_density = torch.cat(proposal_density_list, dim=0)
        self._gt_density = torch.cat(gt_density_list, dim=0)
        self._proposals = jittered_boxes
        # TODO check output distribution
        return jittered_boxes

    def __call__(self, score):
        """
        Computes the KL-D-IS loss.
        This requires that sample_jittered_boxes has been called beforehand.

        Arguments:
            score (Tensor): predicted scores; one column per class.

        Returns:
            iou_loss (Tensor): scalar loss, averaged over gt boxes.
        """
        # (score has shape: (num_bboxes, 81)) (num_bboxes can be different for every batch, e.g. 13952, 20608, ...)
        device = score.device

        # TODO handle multi-class stuff
        gt_density = self._gt_density # (shape: (num_bboxes))
        proposal_density = self._proposal_density # (shape: (num_bboxes))
        labels = cat([proposal.get_field("labels") for proposal in self._proposals], dim=0).long()
        sampled_pos_inds_subset = torch.nonzero(labels > 0).squeeze(1)
        labels_pos = labels[sampled_pos_inds_subset]
        if self.cls_agnostic_iou_pred:
            map_inds = torch.tensor([0], device=device)
        else:
            # Pick each proposal's score column by its class label.
            map_inds = labels_pos[:, None]

        # (the three .view(-1, self.num_proposal) arguments below have shape: (num_gt_bboxes_in_batch, M)) (M == 128)
        # NOTE: exp_max is accepted but not read by kl_regression_loss_logsumexp.
        iou_loss = kl_regression_loss_logsumexp(
            score[sampled_pos_inds_subset[:, None], map_inds].view(-1, self.num_proposal),
            proposal_density[sampled_pos_inds_subset].view(-1, self.num_proposal),
            gt_density[sampled_pos_inds_subset].view(-1, self.num_proposal),
            exp_max=10,
            size_average=False,
            mc_dim=1)
        # (iou_loss has shape: (num_gt_bboxes_in_batch))
        # Sum over gt boxes divided by the number of gt boxes, i.e. a mean.
        iou_loss = iou_loss.sum() / (labels.numel() / self.num_proposal)

        return iou_loss
class LossComputation_nce(object): #########################################################################
"""
"""
def __init__(
self,
num_proposal=128,
gt_sigma=(0.125, 0.125),
proposal_sigma=((0.125, 0.125), (0.25, 0.25), (0.5, 0.5), (1.0, 1.0)),
cls_agnostic_iou_pred=False,
exp_clamp=None
):
self.num_proposal = num_proposal
self.gt_sigma = gt_sigma
self.proposal_sigma = proposal_sigma
self.cls_agnostic_iou_pred = cls_agnostic_iou_pred
self.exp_clamp = exp_clamp
def sample_jittered_boxes(self, gt_boxes): ######################################################
"""
:param gt_boxes: BoxList containining the gt boxes for each image
:return:
"""
orig_mode_list = [b.mode for b in gt_boxes]
gt_boxes = [b.convert('xywh') for b in gt_boxes]
proposal_density_list = []
gt_density_list = []
jittered_boxes = []
for gt_b in gt_boxes:
device = gt_b.bbox.device
b = gt_b.bbox.view(-1, 4).cpu()
labels = gt_b.get_field("labels")
out_boxes_list = []
for i in range(b.shape[0]):
proposals, proposal_density, gt_density, proposal_density_zero = nce_sample_box_gmm(b[i, :], self.proposal_sigma, self.gt_sigma,
self.num_proposal-1, True)
proposals = proposals.to(device)
proposal_density = proposal_density.to(device)
gt_density = gt_density.to(device)
out_boxes_list.append(proposals)
proposal_density_list.append(proposal_density)
gt_density_list.append(gt_density)
out_boxes_t = torch.cat(out_boxes_list, dim=0)
new_box = BoxList(out_boxes_t, image_size=gt_b.size, mode='xywh')
labels = labels.view(-1, 1).repeat(1, self.num_proposal)
new_box.add_field("labels", labels.view(-1))
jittered_boxes.append(new_box)
jittered_boxes = [b.convert(m) for b, m in zip(jittered_boxes, orig_mode_list)]
# TODO is this safe?
self._proposal_density = torch.cat(proposal_density_list, dim=0)
self._gt_density = torch.cat(gt_density_list, dim=0)
self._proposals = jittered_boxes
proposal_density_zero = proposal_density_zero.to(device) # (proposal_density_zero has shape: (num_proposl-1)) (all values are identical and constant) (this constant value only depends on std)
self._proposal_density_zero = proposal_density_zero
# TODO check output distribution
return jittered_boxes
def __call__(self, score):
    """Compute the NCE loss from the predicted energies.

    Requires that sample_jittered_boxes has been called beforehand (it
    caches the proposal densities and sampled boxes on self).

    :param score: (num_bboxes, 81) predicted energies; num_bboxes varies
        per batch
    :return: scalar loss tensor

    The loss is the negative mean log-softmax probability of the gt slot:
    -mean(log softmax over [f(y_gt)-log q(y_gt), f(y_i)-log q(y_i), ...]).
    """
    device = score.device
    # TODO handle multi-class stuff
    proposal_density = self._proposal_density  # (shape: (num_bboxes))
    proposal_density_zero = self._proposal_density_zero  # constant q(y_gt)
    labels = cat([proposal.get_field("labels") for proposal in self._proposals], dim=0).long()
    sampled_pos_inds_subset = torch.nonzero(labels > 0).squeeze(1)
    labels_pos = labels[sampled_pos_inds_subset]
    if self.cls_agnostic_iou_pred:
        map_inds = torch.tensor([0], device=device)
    else:
        # Pick each box's energy at its own class channel.
        map_inds = labels_pos[:, None]
    score = score[sampled_pos_inds_subset[:, None], map_inds].view(-1, self.num_proposal)
    # (shape: (num_gt_bboxes_in_batch, M)); column 0 holds the gt box.
    proposal_density = proposal_density[sampled_pos_inds_subset].view(-1, self.num_proposal)
    q_y_samples = proposal_density[:, 1:]  # (shape: (num_gt_bboxes_in_batch, M-1))
    # q(y_gt) is constant; build it directly on the right device.
    q_ys = proposal_density_zero[0] * torch.ones(q_y_samples.size(0), device=device)
    scores_gt = score[:, 0]       # (shape: (num_gt_bboxes_in_batch))
    scores_samples = score[:, 1:]  # (shape: (num_gt_bboxes_in_batch, M-1))
    # Log-ratios f(y) - log q(y) for the gt box and the noise samples.
    log_ratio_gt = scores_gt - torch.log(q_ys)  # (num_gt_bboxes_in_batch,)
    log_ratio_samples = scores_samples - torch.log(q_y_samples)
    all_log_ratios = torch.cat([log_ratio_gt.unsqueeze(1), log_ratio_samples], dim=1)
    # logsumexp replaces log(exp(gt) + sum exp(samples)): mathematically
    # identical but does not overflow for large scores.
    loss = -torch.mean(log_ratio_gt - torch.logsumexp(all_log_ratios, dim=1))
    return loss
class LossComputation_dsm(object):
    """Denoising score matching (DSM) loss for the energy-based IoU head.

    Ground-truth boxes are jittered with Gaussian noise; the loss penalises
    the difference between grad_y f(x, y) at the jittered boxes and the
    score of the noise distribution, -(y_sample - y_gt) / sigma^2, computed
    in relative box coordinates.
    """
    def __init__(
        self,
        num_proposal=128,
        gt_sigma=(0.125, 0.125),
        proposal_sigma=((0.125, 0.125), (0.25, 0.25), (0.5, 0.5), (1.0, 1.0)),
        cls_agnostic_iou_pred=False,
        exp_clamp=None
    ):
        # Number of jittered boxes drawn per gt box (M).
        self.num_proposal = num_proposal
        self.gt_sigma = gt_sigma
        self.proposal_sigma = proposal_sigma
        # NOTE(review): hard-coded and never read within this class — confirm
        # whether this is still needed.
        self.dctd_proposal_sigma = ((0.0375, 0.0375), (0.075, 0.075), (0.15, 0.15))
        self.cls_agnostic_iou_pred = cls_agnostic_iou_pred
        self.exp_clamp = exp_clamp
    def sample_jittered_boxes(self, gt_boxes):
        """Sample self.num_proposal jittered boxes around every gt box.

        Side effects: caches proposal/gt densities and the jittered
        BoxLists on self for later use by __call__.

        :param gt_boxes: list of BoxList containing the gt boxes per image
        :return: list of BoxList with the jittered boxes
        """
        orig_mode_list = [b.mode for b in gt_boxes]
        # The jitter sigmas are defined for xywh coordinates.
        gt_boxes = [b.convert('xywh') for b in gt_boxes]
        proposal_density_list = []
        gt_density_list = []
        jittered_boxes = []
        for gt_b in gt_boxes:
            device = gt_b.bbox.device
            b = gt_b.bbox.view(-1, 4).cpu()
            labels = gt_b.get_field("labels")
            out_boxes_list = []
            for i in range(b.shape[0]):
                proposals, proposal_density, gt_density = sample_box_gmm(b[i, :], self.proposal_sigma, self.gt_sigma,
                                                                         self.num_proposal, False)
                proposals = proposals.to(device)
                proposal_density = proposal_density.to(device)
                gt_density = gt_density.to(device)
                out_boxes_list.append(proposals)
                proposal_density_list.append(proposal_density)
                gt_density_list.append(gt_density)
            out_boxes_t = torch.cat(out_boxes_list, dim=0)
            new_box = BoxList(out_boxes_t, image_size=gt_b.size, mode='xywh')
            # Every jittered box keeps the label of its source gt box.
            labels = labels.view(-1, 1).repeat(1, self.num_proposal)
            new_box.add_field("labels", labels.view(-1))
            jittered_boxes.append(new_box)
        # Restore each image's original box mode (e.g. xyxy).
        jittered_boxes = [b.convert(m) for b, m in zip(jittered_boxes, orig_mode_list)]
        # TODO is this safe?
        self._proposal_density = torch.cat(proposal_density_list, dim=0)
        self._gt_density = torch.cat(gt_density_list, dim=0)
        self._proposals = jittered_boxes
        # TODO check output distribution
        return jittered_boxes
    def __call__(self, fs, ys, y_samples):
        """Compute the DSM loss.

        :param fs: (num_gt_bboxes_in_batch*M, 81) predicted energies
        :param ys: list of BoxList (one per image), the gt boxes
        :param y_samples: list of BoxList (one per image), the jittered
            boxes; their bbox tensors must require grad for autograd.grad
        :return: scalar loss tensor
        """
        # All mixture components share this sigma for the score term.
        sigma = self.proposal_sigma[0][0]
        device = fs.device
        labels = cat([proposal.get_field("labels") for proposal in self._proposals], dim=0).long()
        sampled_pos_inds_subset = torch.nonzero(labels > 0).squeeze(1)
        labels_pos = labels[sampled_pos_inds_subset]
        if self.cls_agnostic_iou_pred:
            map_inds = torch.tensor([0], device=device)
        else:
            # Pick each box's energy at its own class channel.
            map_inds = labels_pos[:, None]
        fs = fs[sampled_pos_inds_subset[:, None], map_inds]  # (shape: (num_gt_bboxes_in_batch*M, 1))
        fs = fs.squeeze(1)  # (shape: (num_gt_bboxes_in_batch*M))
        losses = []
        num_processed_bboxes = 0
        for y_samples_i, ys_i in zip(y_samples, ys):
            # (ys_i.bbox has shape: (num_gt_bboxes_in_img, 4)) (num_gt_bboxes_in_img can be different for different images)
            # (y_samples_i.bbox has shape: (num_gt_bboxes_in_img*M, 4))
            num_bboxes_i = y_samples_i.bbox.size(0)
            fs_i = fs[num_processed_bboxes:(num_processed_bboxes + num_bboxes_i)]  # (shape: (num_gt_bboxes_in_img*M))
            num_processed_bboxes += num_bboxes_i
            # Presumably a guard against excessive autograd memory on very
            # large batches/images — TODO confirm the thresholds.
            if (fs.size(0) > 30000) and (num_bboxes_i > 2500):
                continue
            # Gradient of the summed energies w.r.t. the sampled coordinates.
            grad_y_fs_i = torch.autograd.grad(fs_i.sum(), y_samples_i.bbox, create_graph=True)[0]
            # (shape: (num_gt_bboxes_in_img*M, 4)) (like y_samples_i.bbox)
            # Repeat each gt box M times so it lines up with its samples.
            ys_i_ = ys_i.bbox.view(ys_i.bbox.size(0), 1, -1).expand(-1, self.num_proposal, -1)  # (shape: (num_gt_bboxes_in_img, M, 4))
            ys_i_ = ys_i_.reshape(ys_i.bbox.size(0)*self.num_proposal, -1)  # (shape: (num_gt_bboxes_in_img*M, 4))
            ys_i = ys_i_
            # || grad_y f + (y_sample - y_gt)/sigma^2 ||^2 in relative coords.
            loss_i = torch.norm(grad_y_fs_i + (rect_to_rel(y_samples_i.bbox)-rect_to_rel(ys_i))/(sigma**2), dim=1)**2  # (shape: (num_gt_bboxes_in_img*M))
            losses.append(loss_i)
        # NOTE(review): if every image is skipped by the guard above, losses
        # is empty and torch.cat raises — confirm this cannot happen.
        loss = torch.cat(losses)  # (shape: (num_gt_bboxes_in_batch*M))
        loss = torch.mean(loss)
        return loss
class LossComputation_mlmcmc(object):
    """Maximum-likelihood loss with MCMC-generated negative samples.

    copy_targets replicates each gt box so that the external MCMC sampler
    has per-proposal targets; __call__ then computes the ML objective
    mean(E[f(sample)]) - mean(f(gt)) over the positive gt boxes.
    """
    def __init__(
        self,
        num_proposal=128,
        gt_sigma=(0.125, 0.125),
        proposal_sigma=((0.125, 0.125), (0.25, 0.25), (0.5, 0.5), (1.0, 1.0)),
        cls_agnostic_iou_pred=False,
        exp_clamp=None
    ):
        # M: number of (near-)copies made per gt box.
        self.num_proposal = num_proposal
        self.gt_sigma = gt_sigma
        self.proposal_sigma = proposal_sigma
        self.cls_agnostic_iou_pred = cls_agnostic_iou_pred
        self.exp_clamp = exp_clamp
    def copy_targets(self, gt_boxes):
        """Replicate each gt box self.num_proposal times.

        The replication reuses sample_box_gmm with a vanishingly small
        sigma, so the "samples" are effectively exact copies of the gt box.
        Densities and BoxLists are cached on self for __call__.

        :param gt_boxes: list of BoxList containing the gt boxes per image
        :return: list of BoxList with the replicated boxes
        """
        original_modes = [boxes.mode for boxes in gt_boxes]
        boxes_xywh = [boxes.convert('xywh') for boxes in gt_boxes]
        densities_q = []
        densities_gt = []
        replicated = []
        tiny_sigma = ((1.0e-12, 1.0e-12), (1.0e-12, 1.0e-12))
        for img_boxes in boxes_xywh:
            dev = img_boxes.bbox.device
            coords = img_boxes.bbox.view(-1, 4).cpu()
            img_labels = img_boxes.get_field("labels")
            per_box = []
            for row in range(coords.shape[0]):
                props, q_density, gt_density = sample_box_gmm(
                    coords[row, :], tiny_sigma, self.gt_sigma,
                    self.num_proposal - 1, True)
                per_box.append(props.to(dev))
                densities_q.append(q_density.to(dev))
                densities_gt.append(gt_density.to(dev))
            stacked = torch.cat(per_box, dim=0)
            box_list = BoxList(stacked, image_size=img_boxes.size, mode='xywh')
            # Each copy keeps the label of its source gt box.
            repeated_labels = img_labels.view(-1, 1).repeat(1, self.num_proposal)
            box_list.add_field("labels", repeated_labels.view(-1))
            replicated.append(box_list)
        # Restore each image's original box mode.
        replicated = [b.convert(m) for b, m in zip(replicated, original_modes)]
        # TODO is this safe?
        self._proposal_density = torch.cat(densities_q, dim=0)
        self._gt_density = torch.cat(densities_gt, dim=0)
        self._proposals = replicated
        # TODO check output distribution
        return replicated
    def __call__(self, fs, f_samples, target_labels):
        """Compute the ML-MCMC loss.

        :param fs: (num_gt_bboxes_in_batch,) energies of the gt boxes
        :param f_samples: (num_gt_bboxes_in_batch*M,) energies of the
            MCMC samples
        :param target_labels: (num_gt_bboxes_in_batch,) gt class labels
        :return: scalar loss tensor
        """
        sample_labels = cat(
            [p.get_field("labels") for p in self._proposals], dim=0).long()
        # Keep only the foreground (label > 0) entries on both sides.
        pos_samples = torch.nonzero(sample_labels > 0).squeeze(1)
        pos_targets = torch.nonzero(target_labels > 0).squeeze(1)
        gt_energy = fs[pos_targets]  # (num_gt_bboxes_in_batch,)
        sample_energy = f_samples[pos_samples].view(-1, self.num_proposal)
        # Per-gt mean sample energy minus the gt energy, averaged over gts.
        return torch.mean(sample_energy.mean(dim=1) - gt_energy)
class LossComputation_nceplus(object):
    """NCE+ loss: NCE where the positive is itself a perturbed gt box.

    Besides the M-1 noise samples drawn per gt box, one extra box is drawn
    via nce_sample_box_gmm2 with an annealing factor beta; that extra box
    takes the role of the positive example in the NCE objective.
    """
    def __init__(
        self,
        num_proposal=128,
        gt_sigma=(0.125, 0.125),
        proposal_sigma=((0.125, 0.125), (0.25, 0.25), (0.5, 0.5), (1.0, 1.0)),
        cls_agnostic_iou_pred=False,
        exp_clamp=None
    ):
        # M: samples per gt box; M+1 boxes total including the beta sample.
        self.num_proposal = num_proposal
        self.gt_sigma = gt_sigma
        self.proposal_sigma = proposal_sigma
        self.cls_agnostic_iou_pred = cls_agnostic_iou_pred
        self.exp_clamp = exp_clamp
    def sample_jittered_boxes(self, gt_boxes, beta):
        """Sample self.num_proposal + 1 boxes per gt box.

        Column layout per gt box: [gt box, M-1 noise samples, 1
        beta-perturbed positive], i.e. the extra nce_sample_box_gmm2 draw
        is appended last. Densities and BoxLists are cached on self.

        :param gt_boxes: list of BoxList containing the gt boxes per image
        :param beta: annealing factor forwarded to nce_sample_box_gmm2
        :return: list of BoxList with the sampled boxes
        """
        orig_mode_list = [b.mode for b in gt_boxes]
        # The jitter sigmas are defined for xywh coordinates.
        gt_boxes = [b.convert('xywh') for b in gt_boxes]
        proposal_density_list = []
        gt_density_list = []
        jittered_boxes = []
        for gt_b in gt_boxes:
            device = gt_b.bbox.device
            b = gt_b.bbox.view(-1, 4).cpu()
            labels = gt_b.get_field("labels")
            out_boxes_list = []
            for i in range(b.shape[0]):
                proposals, proposal_density, gt_density, proposal_density_zero = nce_sample_box_gmm(b[i, :], self.proposal_sigma, self.gt_sigma,
                                                                                                    self.num_proposal-1, True)
                proposals2, proposal_density2, _ = nce_sample_box_gmm2(b[i, :], self.proposal_sigma, beta, self.gt_sigma, 1, False)
                # (proposals has shape: (M, 4)), (proposal_density: (M))
                # (proposals2 has shape: (1, 4)), (proposal_density2: (1))
                proposals = torch.cat([proposals, proposals2])  # (shape: (M+1, 4))
                proposal_density = torch.cat([proposal_density, proposal_density2])  # (shape: (M+1))
                proposals = proposals.to(device)
                proposal_density = proposal_density.to(device)
                gt_density = gt_density.to(device)
                out_boxes_list.append(proposals)
                proposal_density_list.append(proposal_density)
                gt_density_list.append(gt_density)
            out_boxes_t = torch.cat(out_boxes_list, dim=0)
            new_box = BoxList(out_boxes_t, image_size=gt_b.size, mode='xywh')
            # Every sampled box keeps the label of its source gt box.
            labels = labels.view(-1, 1).repeat(1, self.num_proposal+1)
            new_box.add_field("labels", labels.view(-1))
            jittered_boxes.append(new_box)
        # Restore each image's original box mode.
        jittered_boxes = [b.convert(m) for b, m in zip(jittered_boxes, orig_mode_list)]
        # TODO is this safe?
        self._proposal_density = torch.cat(proposal_density_list, dim=0)
        self._gt_density = torch.cat(gt_density_list, dim=0)
        self._proposals = jittered_boxes
        # Only the last iteration's value is used; per the note below all
        # entries are identical and depend only on the sigma.
        # NOTE(review): raises NameError if gt_boxes contains no boxes.
        proposal_density_zero = proposal_density_zero.to(device)  # (proposal_density_zero has shape: (num_proposl-1)) (all values are identical and constant) (this constant value only depends on std)
        self._proposal_density_zero = proposal_density_zero
        # TODO check output distribution
        return jittered_boxes
    def __call__(self, score):
        """Compute the NCE+ loss from the predicted energies.

        Requires that sample_jittered_boxes has been called beforehand.

        :param score: (num_bboxes, 81) predicted energies; num_bboxes
            varies per batch
        :return: scalar loss tensor
        """
        device = score.device
        # TODO handle multi-class stuff
        gt_density = self._gt_density  # (shape: (num_bboxes)) — unused here
        proposal_density = self._proposal_density  # (shape: (num_bboxes))
        proposal_density_zero = self._proposal_density_zero  # unused here
        labels = cat([proposal.get_field("labels") for proposal in self._proposals], dim=0).long()
        sampled_pos_inds_subset = torch.nonzero(labels > 0).squeeze(1)
        labels_pos = labels[sampled_pos_inds_subset]
        if self.cls_agnostic_iou_pred:
            map_inds = torch.tensor([0], device=device)
        else:
            # Pick each box's energy at its own class channel.
            map_inds = labels_pos[:, None]
        score = score[sampled_pos_inds_subset[:, None], map_inds].view(-1, self.num_proposal+1)
        # (shape: (num_gt_bboxes_in_batch, M+1))
        proposal_density = proposal_density[sampled_pos_inds_subset].view(-1, self.num_proposal+1)
        # (shape: (num_gt_bboxes_in_batch, M+1))
        # Column layout: 0 = true gt box (NOT used in the loss below),
        # 1..M-1 = noise samples, M = the beta-perturbed positive.
        q_y_samples = proposal_density[:, 1:self.num_proposal]  # (shape: (num_gt_bboxes_in_batch, M-1))
        q_ys2 = proposal_density[:, self.num_proposal]  # (shape: (num_gt_bboxes_in_batch))
        scores_gt2 = score[:, self.num_proposal]  # (shape: (num_gt_bboxes_in_batch))
        scores_samples = score[:, 1:self.num_proposal]  # (shape: (num_gt_bboxes_in_batch, M-1))
        # NCE cross-entropy with the perturbed positive in the gt slot.
        loss = -torch.mean(scores_gt2-torch.log(q_ys2) - torch.log(torch.exp(scores_gt2-torch.log(q_ys2)) + torch.sum(torch.exp(scores_samples-torch.log(q_y_samples)), dim=1)))
        return loss
def make_roi_iou_loss_evaluator(cfg):
    """Build the IoU/energy-head loss evaluator selected by the config.

    :param cfg: project config node; reads MODEL.ROI_IOU_HEAD.* and
        MODEL.CLS_AGNOSTIC_IOU_PRED
    :return: one of the LossComputation_* instances
    :raises ValueError: if cfg.MODEL.ROI_IOU_HEAD.LOSS_TYPE is unknown
    """
    num_proposal = cfg.MODEL.ROI_IOU_HEAD.NUM_TRAIN_PROPOSALS
    # Read (and thereby validate) config keys even though they are unused
    # by the current evaluators.
    proposal_min_overlap = cfg.MODEL.ROI_IOU_HEAD.MIN_OVERLAP_PROPOSAL
    cls_agnostic_iou_pred = cfg.MODEL.CLS_AGNOSTIC_IOU_PRED
    loss_type = cfg.MODEL.ROI_IOU_HEAD.LOSS_TYPE
    sampling_type = cfg.MODEL.ROI_IOU_HEAD.PROPOSAL_SAMPLING_TYPE
    proposal_sigma = cfg.MODEL.ROI_IOU_HEAD.PROPOSAL_SIGMA
    gt_sigma = cfg.MODEL.ROI_IOU_HEAD.GT_SIGMA
    # All evaluators share the same constructor signature.
    common_kwargs = dict(num_proposal=num_proposal,
                         gt_sigma=gt_sigma,
                         proposal_sigma=proposal_sigma,
                         cls_agnostic_iou_pred=cls_agnostic_iou_pred,
                         exp_clamp=None)
    if loss_type == "ML-IS":
        return LossComputation_mlis(**common_kwargs)
    elif loss_type == "KLD-IS":
        return LossComputation_kldis(**common_kwargs)
    elif loss_type == "NCE":
        return LossComputation_nce(**common_kwargs)
    elif loss_type == "DSM":
        return LossComputation_dsm(**common_kwargs)
    elif loss_type == "ML-MCMC":
        return LossComputation_mlmcmc(**common_kwargs)
    elif loss_type == "NCE+":
        return LossComputation_nceplus(**common_kwargs)
    else:
        # Name the offending value instead of a bare ValueError.
        raise ValueError(
            "Unknown MODEL.ROI_IOU_HEAD.LOSS_TYPE: {!r}".format(loss_type))
| 38.967033
| 214
| 0.577537
| 8,202
| 63,828
| 4.179712
| 0.048281
| 0.055569
| 0.032379
| 0.01896
| 0.799691
| 0.773496
| 0.748439
| 0.737617
| 0.721341
| 0.71011
| 0
| 0.022259
| 0.296155
| 63,828
| 1,637
| 215
| 38.990837
| 0.740835
| 0.195149
| 0
| 0.701676
| 0
| 0
| 0.007639
| 0
| 0
| 0
| 0
| 0.010385
| 0.006704
| 1
| 0.054749
| false
| 0
| 0.010056
| 0.001117
| 0.121788
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8646f3b9872326e55f4a737c981fa618ebe97292
| 13,551
|
py
|
Python
|
minihinch/gui/fonts/arial_50.py
|
aabbtree77/esp32-mqtt-experiments
|
c008161a2cfe8607d6e3d5b635cbccc98b4bd553
|
[
"MIT"
] | 198
|
2018-08-31T22:30:28.000Z
|
2022-03-27T14:21:36.000Z
|
minihinch/gui/fonts/arial_50.py
|
aabbtree77/esp32-mqtt-experiments
|
c008161a2cfe8607d6e3d5b635cbccc98b4bd553
|
[
"MIT"
] | 24
|
2018-10-01T23:44:25.000Z
|
2022-01-08T09:05:14.000Z
|
minihinch/gui/fonts/arial_50.py
|
aabbtree77/esp32-mqtt-experiments
|
c008161a2cfe8607d6e3d5b635cbccc98b4bd553
|
[
"MIT"
] | 44
|
2018-09-30T02:09:56.000Z
|
2022-03-25T07:37:36.000Z
|
# Code generated by font_to_py.py.
# Font: Arial.ttf Char set: 0123456789:
# Cmd: ./font_to_py.py Arial.ttf 50 arial_50.py -x -c 0123456789:
version = '0.33'
def height():
    """Return the rendered glyph height in pixels (font_to_py metadata)."""
    return 50
def baseline():
    """Return the baseline position in pixels from the glyph top."""
    return 49
def max_width():
    """Return the width in pixels of the widest glyph in the set."""
    return 37
def hmap():
    """Return True: glyph bitmaps are horizontally mapped."""
    return True
def reverse():
    """Return False: bit order within glyph bytes is not reversed."""
    return False
def monospaced():
    """Return False: this font uses proportional (per-glyph) widths."""
    return False
def min_ch():
    """Return the lowest character code in the index (48, i.e. '0')."""
    return 48
def max_ch():
    """Return the highest character code covered by the index (63)."""
    return 63
_font =\
b'\x25\x00\x00\x03\xfe\x00\x00\x00\x1f\xff\xc0\x00\x00\x7f\xff\xf0'\
b'\x00\x00\xff\xff\xf8\x00\x01\xff\xff\xfc\x00\x03\xff\xff\xfe\x00'\
b'\x07\xfe\x03\xff\x00\x07\xf8\x00\xff\x80\x0f\xf0\x00\x7f\x80\x0f'\
b'\xe0\x00\x3f\x80\x0f\xc0\x00\x1f\xc0\x1f\xc0\x00\x1f\xc0\x1f\xc0'\
b'\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x03\x80\x00\x0f\xc0\x00\x00\x00'\
b'\x0f\xc0\x00\x00\x00\x0f\xc0\x00\x00\x00\x1f\x80\x00\x00\x00\x3f'\
b'\x80\x00\x00\x00\x7f\x00\x00\x00\x00\xff\x00\x00\x00\x01\xfe\x00'\
b'\x00\x00\x03\xfc\x00\x00\x00\x07\xf8\x00\x00\x00\x0f\xf0\x00\x00'\
b'\x00\x1f\xe0\x00\x00\x00\x3f\xc0\x00\x00\x00\x7f\x80\x00\x00\x00'\
b'\x7f\x00\x00\x00\x00\xfe\x00\x00\x00\x00\xfc\x00\x00\x00\x01\xfc'\
b'\x00\x00\x00\x01\xfc\x00\x00\x00\x01\xf8\x00\x00\x00\x01\xf8\x00'\
b'\x00\x00\x01\xf8\x00\x00\x00\x01\xf8\x00\x00\x00\x01\xf8\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xf8\x00\x00\x00\x01'\
b'\xf8\x00\x00\x00\x01\xf8\x00\x00\x00\x01\xf8\x00\x00\x00\x01\xf8'\
b'\x00\x00\x00\x01\xf8\x00\x00\x00\x00\x00\x00\x00\x25\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xfe\x00\x00\x00\x0f\xff\x80\x00\x00\x3f\xff'\
b'\xe0\x00\x00\x7f\xff\xf0\x00\x00\xff\xff\xf8\x00\x01\xff\xff\xfc'\
b'\x00\x03\xfe\x07\xfc\x00\x03\xfc\x01\xfe\x00\x07\xf0\x00\xfe\x00'\
b'\x07\xf0\x00\x7f\x00\x07\xe0\x00\x3f\x00\x0f\xe0\x00\x3f\x00\x0f'\
b'\xc0\x00\x1f\x80\x0f\xc0\x00\x1f\x80\x0f\xc0\x00\x1f\x80\x0f\xc0'\
b'\x00\x1f\x80\x1f\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\x80\x00'\
b'\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\x80\x00\x0f'\
b'\xc0\x1f\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0'\
b'\x1f\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f'\
b'\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\x80'\
b'\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x0f\xc0\x00\x1f\x80\x0f\xc0\x00'\
b'\x1f\x80\x0f\xc0\x00\x1f\x80\x0f\xc0\x00\x1f\x80\x0f\xe0\x00\x3f'\
b'\x80\x07\xe0\x00\x3f\x00\x07\xf0\x00\x7f\x00\x07\xf8\x00\xff\x00'\
b'\x03\xfc\x01\xfe\x00\x03\xff\x07\xfe\x00\x01\xff\xff\xfc\x00\x00'\
b'\xff\xff\xf8\x00\x00\x7f\xff\xf0\x00\x00\x3f\xff\xe0\x00\x00\x0f'\
b'\xff\x80\x00\x00\x03\xfe\x00\x00\x25\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x07\x80\x00\x00\x00\x0f\x80\x00\x00\x00\x0f\x80\x00\x00\x00'\
b'\x1f\x80\x00\x00\x00\x3f\x80\x00\x00\x00\x7f\x80\x00\x00\x00\xff'\
b'\x80\x00\x00\x03\xff\x80\x00\x00\x07\xff\x80\x00\x00\x0f\xff\x80'\
b'\x00\x00\x3f\xff\x80\x00\x00\xff\xdf\x80\x00\x01\xff\x9f\x80\x00'\
b'\x01\xfe\x1f\x80\x00\x01\xfc\x1f\x80\x00\x01\xf0\x1f\x80\x00\x01'\
b'\xc0\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00'\
b'\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f'\
b'\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80'\
b'\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00'\
b'\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00'\
b'\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00'\
b'\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f'\
b'\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80'\
b'\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00'\
b'\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00'\
b'\x00\x00\x00\x00\x25\x00\x00\x00\x00\x00\x00\x00\x03\xfe\x00\x00'\
b'\x00\x1f\xff\xc0\x00\x00\x7f\xff\xe0\x00\x01\xff\xff\xf8\x00\x03'\
b'\xff\xff\xfc\x00\x03\xff\xff\xfc\x00\x07\xfe\x07\xfe\x00\x0f\xf0'\
b'\x00\xff\x00\x0f\xe0\x00\x7f\x00\x0f\xc0\x00\x3f\x00\x1f\xc0\x00'\
b'\x3f\x80\x1f\x80\x00\x1f\x80\x1f\x80\x00\x1f\x80\x03\x80\x00\x1f'\
b'\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80'\
b'\x00\x00\x00\x3f\x00\x00\x00\x00\x3f\x00\x00\x00\x00\x7f\x00\x00'\
b'\x00\x00\xfe\x00\x00\x00\x01\xfe\x00\x00\x00\x03\xfc\x00\x00\x00'\
b'\x07\xf8\x00\x00\x00\x0f\xf0\x00\x00\x00\x1f\xe0\x00\x00\x00\x3f'\
b'\xe0\x00\x00\x00\x7f\xc0\x00\x00\x00\xff\x80\x00\x00\x01\xfe\x00'\
b'\x00\x00\x03\xfc\x00\x00\x00\x07\xf8\x00\x00\x00\x1f\xf0\x00\x00'\
b'\x00\x3f\xe0\x00\x00\x00\x7f\xc0\x00\x00\x00\xff\x80\x00\x00\x01'\
b'\xfe\x00\x00\x00\x03\xfc\x00\x00\x00\x07\xf8\x00\x00\x00\x07\xf0'\
b'\x00\x00\x00\x0f\xe0\x00\x00\x00\x0f\xe0\x00\x00\x00\x1f\xff\xff'\
b'\xff\x80\x1f\xff\xff\xff\x80\x3f\xff\xff\xff\x80\x3f\xff\xff\xff'\
b'\x80\x3f\xff\xff\xff\x80\x3f\xff\xff\xff\x80\x00\x00\x00\x00\x00'\
b'\x25\x00\x00\x00\x00\x00\x00\x00\x07\xfc\x00\x00\x00\x1f\xff\x80'\
b'\x00\x00\x7f\xff\xe0\x00\x00\xff\xff\xf0\x00\x01\xff\xff\xf8\x00'\
b'\x03\xff\xff\xfc\x00\x07\xfc\x07\xfe\x00\x0f\xf0\x01\xfe\x00\x0f'\
b'\xe0\x00\xfe\x00\x0f\xc0\x00\x7f\x00\x1f\xc0\x00\x3f\x00\x1f\x80'\
b'\x00\x3f\x00\x03\x80\x00\x3f\x00\x00\x00\x00\x3f\x00\x00\x00\x00'\
b'\x3f\x00\x00\x00\x00\x7e\x00\x00\x00\x00\xfe\x00\x00\x00\x01\xfc'\
b'\x00\x00\x00\x0f\xf8\x00\x00\x01\xff\xf0\x00\x00\x01\xff\xe0\x00'\
b'\x00\x01\xff\xe0\x00\x00\x01\xff\xf8\x00\x00\x01\xff\xfc\x00\x00'\
b'\x01\x8f\xfe\x00\x00\x00\x01\xff\x00\x00\x00\x00\x7f\x00\x00\x00'\
b'\x00\x3f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\xc0\x00\x00\x00'\
b'\x0f\xc0\x00\x00\x00\x0f\xc0\x00\x00\x00\x0f\xc0\x00\x00\x00\x0f'\
b'\xc0\x00\x00\x00\x0f\xc0\x03\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0'\
b'\x1f\xc0\x00\x1f\x80\x1f\xc0\x00\x1f\x80\x0f\xe0\x00\x3f\x80\x0f'\
b'\xf0\x00\x7f\x00\x07\xf8\x00\xff\x00\x07\xfe\x03\xfe\x00\x03\xff'\
b'\xff\xfc\x00\x01\xff\xff\xf8\x00\x00\xff\xff\xf0\x00\x00\x7f\xff'\
b'\xe0\x00\x00\x1f\xff\x80\x00\x00\x03\xfc\x00\x00\x25\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xf0\x00\x00\x00\x03'\
b'\xf0\x00\x00\x00\x07\xf0\x00\x00\x00\x0f\xf0\x00\x00\x00\x0f\xf0'\
b'\x00\x00\x00\x1f\xf0\x00\x00\x00\x3f\xf0\x00\x00\x00\x7f\xf0\x00'\
b'\x00\x00\x7f\xf0\x00\x00\x00\xff\xf0\x00\x00\x01\xff\xf0\x00\x00'\
b'\x01\xff\xf0\x00\x00\x03\xfb\xf0\x00\x00\x07\xf3\xf0\x00\x00\x0f'\
b'\xf3\xf0\x00\x00\x0f\xe3\xf0\x00\x00\x1f\xc3\xf0\x00\x00\x3f\x83'\
b'\xf0\x00\x00\x7f\x83\xf0\x00\x00\x7f\x03\xf0\x00\x00\xfe\x03\xf0'\
b'\x00\x01\xfc\x03\xf0\x00\x03\xfc\x03\xf0\x00\x03\xf8\x03\xf0\x00'\
b'\x07\xf0\x03\xf0\x00\x0f\xf0\x03\xf0\x00\x0f\xe0\x03\xf0\x00\x1f'\
b'\xc0\x03\xf0\x00\x3f\x80\x03\xf0\x00\x7f\x80\x03\xf0\x00\x7f\xff'\
b'\xff\xff\xc0\x7f\xff\xff\xff\xc0\x7f\xff\xff\xff\xc0\x7f\xff\xff'\
b'\xff\xc0\x7f\xff\xff\xff\xc0\x7f\xff\xff\xff\xc0\x00\x00\x03\xf0'\
b'\x00\x00\x00\x03\xf0\x00\x00\x00\x03\xf0\x00\x00\x00\x03\xf0\x00'\
b'\x00\x00\x03\xf0\x00\x00\x00\x03\xf0\x00\x00\x00\x03\xf0\x00\x00'\
b'\x00\x03\xf0\x00\x00\x00\x03\xf0\x00\x00\x00\x03\xf0\x00\x00\x00'\
b'\x03\xf0\x00\x00\x00\x00\x00\x00\x25\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x7f\xff\xff\x00\x00\x7f\xff\xff\x00\x00\xff'\
b'\xff\xff\x00\x00\xff\xff\xff\x00\x00\xff\xff\xff\x00\x00\xff\xff'\
b'\xff\x00\x00\xfc\x00\x00\x00\x01\xfc\x00\x00\x00\x01\xf8\x00\x00'\
b'\x00\x01\xf8\x00\x00\x00\x01\xf8\x00\x00\x00\x01\xf8\x00\x00\x00'\
b'\x03\xf8\x00\x00\x00\x03\xf0\x00\x00\x00\x03\xf0\x00\x00\x00\x03'\
b'\xf0\x7f\x00\x00\x03\xf3\xff\xc0\x00\x07\xf7\xff\xf0\x00\x07\xff'\
b'\xff\xf8\x00\x07\xff\xff\xfc\x00\x07\xff\xff\xfe\x00\x07\xfe\x03'\
b'\xff\x00\x0f\xf8\x00\xff\x00\x0f\xf0\x00\x7f\x80\x0f\xe0\x00\x3f'\
b'\x80\x01\xc0\x00\x1f\x80\x00\x00\x00\x1f\xc0\x00\x00\x00\x0f\xc0'\
b'\x00\x00\x00\x0f\xc0\x00\x00\x00\x0f\xc0\x00\x00\x00\x0f\xc0\x00'\
b'\x00\x00\x0f\xc0\x00\x00\x00\x0f\xc0\x00\x00\x00\x0f\xc0\x00\x00'\
b'\x00\x0f\xc0\x1f\x80\x00\x1f\x80\x1f\x80\x00\x1f\x80\x1f\xc0\x00'\
b'\x1f\x80\x0f\xc0\x00\x3f\x00\x0f\xe0\x00\x7f\x00\x07\xf0\x00\xfe'\
b'\x00\x07\xfc\x03\xfe\x00\x03\xff\xff\xfc\x00\x01\xff\xff\xf8\x00'\
b'\x00\xff\xff\xf0\x00\x00\x7f\xff\xe0\x00\x00\x1f\xff\x80\x00\x00'\
b'\x07\xfc\x00\x00\x25\x00\x00\x00\x00\x00\x00\x00\x01\xfe\x00\x00'\
b'\x00\x0f\xff\xc0\x00\x00\x3f\xff\xf0\x00\x00\x7f\xff\xf8\x00\x00'\
b'\xff\xff\xfc\x00\x01\xff\xff\xfe\x00\x03\xff\x03\xfe\x00\x03\xf8'\
b'\x00\xff\x00\x07\xf0\x00\x7f\x00\x07\xf0\x00\x3f\x00\x0f\xe0\x00'\
b'\x3f\x80\x0f\xc0\x00\x1f\x80\x0f\xc0\x00\x00\x00\x1f\x80\x00\x00'\
b'\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00'\
b'\x1f\x00\xff\x00\x00\x3f\x07\xff\xc0\x00\x3f\x0f\xff\xf0\x00\x3f'\
b'\x3f\xff\xf8\x00\x3f\x7f\xff\xfc\x00\x3f\x7f\xff\xfe\x00\x3f\xfe'\
b'\x03\xff\x00\x3f\xf0\x00\xff\x00\x3f\xe0\x00\x7f\x80\x3f\xc0\x00'\
b'\x3f\x80\x3f\x80\x00\x1f\x80\x3f\x80\x00\x1f\xc0\x3f\x00\x00\x0f'\
b'\xc0\x3f\x00\x00\x0f\xc0\x3f\x00\x00\x0f\xc0\x3f\x00\x00\x0f\xc0'\
b'\x1f\x00\x00\x0f\xc0\x1f\x00\x00\x0f\xc0\x1f\x00\x00\x0f\xc0\x1f'\
b'\x80\x00\x1f\xc0\x1f\x80\x00\x1f\x80\x0f\xc0\x00\x1f\x80\x0f\xc0'\
b'\x00\x3f\x80\x07\xe0\x00\x7f\x00\x07\xf8\x00\xff\x00\x03\xfe\x03'\
b'\xfe\x00\x01\xff\xff\xfc\x00\x01\xff\xff\xfc\x00\x00\x7f\xff\xf8'\
b'\x00\x00\x3f\xff\xe0\x00\x00\x0f\xff\xc0\x00\x00\x01\xfe\x00\x00'\
b'\x25\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xff\xff\xff'\
b'\xc0\x1f\xff\xff\xff\xc0\x1f\xff\xff\xff\xc0\x1f\xff\xff\xff\xc0'\
b'\x1f\xff\xff\xff\xc0\x1f\xff\xff\xff\x80\x00\x00\x00\x0f\x80\x00'\
b'\x00\x00\x1f\x00\x00\x00\x00\x3e\x00\x00\x00\x00\x7c\x00\x00\x00'\
b'\x00\xfc\x00\x00\x00\x01\xf8\x00\x00\x00\x01\xf0\x00\x00\x00\x03'\
b'\xf0\x00\x00\x00\x07\xe0\x00\x00\x00\x07\xc0\x00\x00\x00\x0f\xc0'\
b'\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x3f\x00\x00'\
b'\x00\x00\x3f\x00\x00\x00\x00\x7e\x00\x00\x00\x00\x7e\x00\x00\x00'\
b'\x00\xfc\x00\x00\x00\x00\xfc\x00\x00\x00\x01\xf8\x00\x00\x00\x01'\
b'\xf8\x00\x00\x00\x03\xf0\x00\x00\x00\x03\xf0\x00\x00\x00\x03\xf0'\
b'\x00\x00\x00\x07\xe0\x00\x00\x00\x07\xe0\x00\x00\x00\x07\xe0\x00'\
b'\x00\x00\x0f\xc0\x00\x00\x00\x0f\xc0\x00\x00\x00\x0f\xc0\x00\x00'\
b'\x00\x0f\xc0\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00'\
b'\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x3f'\
b'\x00\x00\x00\x00\x3f\x00\x00\x00\x00\x3f\x00\x00\x00\x00\x3f\x00'\
b'\x00\x00\x00\x3f\x00\x00\x00\x00\x00\x00\x00\x00\x25\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xfe\x00\x00\x00\x1f\xff\x80\x00\x00\x3f\xff'\
b'\xe0\x00\x00\x7f\xff\xf0\x00\x00\xff\xff\xf8\x00\x01\xff\xff\xfc'\
b'\x00\x03\xfe\x03\xfe\x00\x03\xf8\x00\xfe\x00\x03\xf0\x00\x7e\x00'\
b'\x07\xf0\x00\x7f\x00\x07\xe0\x00\x3f\x00\x07\xe0\x00\x3f\x00\x07'\
b'\xe0\x00\x3f\x00\x07\xe0\x00\x3f\x00\x07\xe0\x00\x3f\x00\x07\xf0'\
b'\x00\x7f\x00\x03\xf0\x00\x7e\x00\x03\xf8\x00\xfe\x00\x01\xfe\x03'\
b'\xfc\x00\x00\xff\xff\xf8\x00\x00\x7f\xff\xf0\x00\x00\x1f\xff\xc0'\
b'\x00\x00\x3f\xff\xe0\x00\x00\xff\xff\xf8\x00\x01\xff\xff\xfc\x00'\
b'\x03\xfe\x03\xfe\x00\x07\xf8\x00\xff\x00\x07\xe0\x00\x7f\x00\x0f'\
b'\xe0\x00\x3f\x80\x0f\xc0\x00\x1f\x80\x1f\xc0\x00\x1f\xc0\x1f\x80'\
b'\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\x80\x00'\
b'\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\xc0\x00\x1f'\
b'\xc0\x0f\xc0\x00\x1f\x80\x0f\xc0\x00\x3f\x80\x0f\xe0\x00\x3f\x80'\
b'\x07\xf8\x00\xff\x00\x07\xfe\x03\xff\x00\x03\xff\xff\xfe\x00\x01'\
b'\xff\xff\xfc\x00\x00\xff\xff\xf8\x00\x00\x7f\xff\xf0\x00\x00\x1f'\
b'\xff\xc0\x00\x00\x03\xfe\x00\x00\x25\x00\x00\x00\x00\x00\x00\x00'\
b'\x03\xfc\x00\x00\x00\x1f\xff\x00\x00\x00\x7f\xff\xc0\x00\x00\xff'\
b'\xff\xf0\x00\x01\xff\xff\xf8\x00\x03\xff\xff\xfc\x00\x03\xfe\x03'\
b'\xfc\x00\x07\xf8\x00\xfe\x00\x0f\xf0\x00\x7e\x00\x0f\xe0\x00\x3f'\
b'\x00\x0f\xe0\x00\x1f\x00\x1f\xc0\x00\x1f\x80\x1f\xc0\x00\x0f\x80'\
b'\x1f\x80\x00\x0f\x80\x1f\x80\x00\x0f\x80\x1f\x80\x00\x0f\xc0\x1f'\
b'\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\x80\x00\x0f\xc0\x1f\x80'\
b'\x00\x0f\xc0\x1f\xc0\x00\x1f\xc0\x0f\xc0\x00\x1f\xc0\x0f\xe0\x00'\
b'\x3f\xc0\x0f\xf0\x00\x7f\xc0\x07\xf8\x00\xff\xc0\x07\xfe\x03\xff'\
b'\xc0\x03\xff\xff\xff\xc0\x01\xff\xff\xef\xc0\x00\xff\xff\xcf\xc0'\
b'\x00\x7f\xff\x0f\xc0\x00\x1f\xfe\x0f\xc0\x00\x07\xf0\x0f\xc0\x00'\
b'\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00\x00\x1f\x80\x00\x00'\
b'\x00\x1f\x80\x00\x00\x00\x3f\x00\x0f\xc0\x00\x3f\x00\x0f\xc0\x00'\
b'\x3f\x00\x0f\xe0\x00\x7e\x00\x07\xe0\x00\xfe\x00\x07\xf0\x01\xfc'\
b'\x00\x03\xfc\x07\xfc\x00\x03\xff\xff\xf8\x00\x01\xff\xff\xf0\x00'\
b'\x00\xff\xff\xe0\x00\x00\x7f\xff\xc0\x00\x00\x1f\xff\x00\x00\x00'\
b'\x07\xf8\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x03\xf0\x00\x03\xf0\x00\x03\xf0\x00\x03\xf0\x00\x03\xf0\x00\x03'\
b'\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x03\xf0\x00\x03\xf0\x00\x03\xf0\x00'\
b'\x03\xf0\x00\x03\xf0\x00\x03\xf0\x00\x00\x00\x00'
_index =\
b'\x00\x00\xfc\x00\xf8\x01\xf4\x02\xf0\x03\xec\x04\xe8\x05\xe4\x06'\
b'\xe0\x07\xdc\x08\xd8\x09\xd4\x0a\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x6c\x0b'
_mvfont = memoryview(_font)
_mvi = memoryview(_index)
def ifb(l):
    """Decode a little-endian unsigned 16-bit int from the first two bytes of l.

    Accepts any indexable of ints (bytes, bytearray, memoryview). Replaces
    the original ``lambda`` assignment (PEP 8 E731) with a named def;
    call signature and behavior are unchanged.
    """
    return l[0] | (l[1] << 8)
def get_ch(ch):
    """Look up the glyph for character ch.

    Returns a 3-tuple (bitmap, height, width) where bitmap is a zero-copy
    memoryview into the font data. Characters with codes outside 48..63
    fall back to index slot 0.
    """
    code = ord(ch)
    if 48 <= code <= 63:
        index_offset = 2 * (code - 48 + 1)
    else:
        index_offset = 0
    data_offset = ifb(_mvi[index_offset:])
    glyph_width = ifb(_mvfont[data_offset:])
    # Each glyph row occupies ceil(width / 8) bytes; 50 rows per glyph.
    bytes_per_row = (glyph_width - 1) // 8 + 1
    end_offset = data_offset + 2 + bytes_per_row * 50
    return _mvfont[data_offset + 2:end_offset], 50, glyph_width
| 58.158798
| 68
| 0.703786
| 3,272
| 13,551
| 2.908619
| 0.031785
| 0.46212
| 0.381107
| 0.240832
| 0.8385
| 0.753494
| 0.700116
| 0.62982
| 0.556163
| 0.505622
| 0
| 0.339435
| 0.028411
| 13,551
| 232
| 69
| 58.409483
| 0.383412
| 0.009889
| 0
| 0.12093
| 1
| 0.860465
| 0.883024
| 0.881533
| 0
| 1
| 0
| 0
| 0
| 1
| 0.04186
| false
| 0
| 0
| 0.037209
| 0.083721
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
8650b678b1deb288e3f71c5785e520b3a120b022
| 55,912
|
py
|
Python
|
backend/api/tests/ProyectoTestCase.py
|
kukiamarilla/polijira
|
510dbc1473db973ac71fc68fa5a9b758b90a780b
|
[
"MIT"
] | 1
|
2022-03-02T02:28:49.000Z
|
2022-03-02T02:28:49.000Z
|
backend/api/tests/ProyectoTestCase.py
|
kukiamarilla/polijira
|
510dbc1473db973ac71fc68fa5a9b758b90a780b
|
[
"MIT"
] | 22
|
2021-09-01T17:44:25.000Z
|
2021-10-07T19:39:09.000Z
|
backend/api/tests/ProyectoTestCase.py
|
kukiamarilla/polijira
|
510dbc1473db973ac71fc68fa5a9b758b90a780b
|
[
"MIT"
] | null | null | null |
import datetime
from django.test import TestCase, Client
from backend.api.models import Miembro, Proyecto, RolProyecto, Usuario, Permiso, PermisoProyecto, SprintBacklog
from backend.api.models.Sprint import Sprint
from backend.api.models.UserStory import UserStory
class ProyectoTestCase(TestCase):
"""
ProyectoTestCase Prueba las funcionalidades del modelo Proyecto
"""
fixtures = [
"backend/api/fixtures/testing/auth.json",
"backend/api/fixtures/testing/usuarios.json",
"backend/api/fixtures/testing/permisos.json",
"backend/api/fixtures/testing/roles.json",
"backend/api/fixtures/testing/proyectos.json",
"backend/api/fixtures/testing/permisosProyecto.json",
"backend/api/fixtures/testing/plantillas.json",
"backend/api/fixtures/testing/rolesProyecto.json",
"backend/api/fixtures/testing/miembros.json",
"backend/api/fixtures/testing/horarios.json",
"backend/api/fixtures/testing/user-stories.json",
"backend/api/fixtures/testing/product-backlogs.json",
"backend/api/fixtures/testing/registro-user-stories.json",
"backend/api/fixtures/testing/sprints.json",
"backend/api/fixtures/testing/sprintbacklogs.json",
"backend/api/fixtures/testing/miembrosprints.json",
]
def setUp(self):
"""
setUp Configura el TestCase
"""
self.client = Client()
def test_listar_todos_los_proyectos_sin_permiso_ver_proyecto(self):
"""
test_listar_proyectos_sin_permiso_ver_proyecto Prueba listar todos los proyectos al que es miembro el usuario
"""
print("\nProbando listar todos los proyectos.")
self.client.login(username="testing", password="polijira2021")
Permiso.objects.get(codigo="ver_proyectos").delete()
response = self.client.get("/api/proyectos/")
body = response.json()
self.assertEquals(response.status_code, 200)
usuario = Usuario.objects.get(pk=1)
self.assertEquals(Proyecto.objects.filter(miembros__usuario=usuario).count(), len(body))
def test_listar_todos_los_proyectos(self):
"""
test_listar_todos_los_proyectos Prueba listar todos los proyectos del sistema
"""
print("\nProbando listar todos los proyectos del sistema.")
self.client.login(username="testing", password="polijira2021")
response = self.client.get("/api/proyectos/")
body = response.json()
self.assertEquals(response.status_code, 200)
self.assertEquals(Proyecto.objects.count(), len(body))
def test_obtener_proyecto(self):
"""
test_obtener_proyecto Prueba obtener detalles de un proyecto
"""
print("\nProbando obtener detalles de un proyecto.")
self.client.login(username="testing", password="polijira2021")
response = self.client.get("/api/proyectos/1/")
self.assertEquals(response.status_code, 200)
body = response.json()
proyecto = Proyecto.objects.get(pk=1)
self.assertEquals(body['nombre'], proyecto.nombre)
self.assertEquals(body['fecha_inicio'], str(proyecto.fecha_inicio))
self.assertEquals(body['fecha_fin'], str(proyecto.fecha_fin))
self.assertEquals(body['scrum_master']['id'], proyecto.scrum_master.id)
self.assertEquals(body['estado'], proyecto.estado)
def test_obtener_proyecto_no_existente(self):
"""
test_obtener_proyecto_no_existente Prueba obtener detalles de un proyecto que no existe
"""
print("\nProbando obtener detalles de un proyecto que no existe")
self.client.login(username="testing", password="polijira2021")
response = self.client.get("/api/proyectos/1000/")
body = response.json()
self.assertEquals(body['error'], 'not_found')
self.assertEquals(response.status_code, 404)
def test_crear_proyecto(self):
"""
test_crear_proyecto Prueba crear un proyecto
"""
print("\nProbando crear un proyecto")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.post("/api/proyectos/", proyecto_body, content_type="application/json")
self.assertEquals(response.status_code, 200)
proyecto = Proyecto.objects.filter(
nombre=proyecto_body["nombre"],
fecha_inicio=proyecto_body["fecha_inicio"],
fecha_fin=proyecto_body["fecha_fin"],
scrum_master=Usuario.objects.get(pk=proyecto_body["scrum_master_id"]),
estado="P"
)
self.assertEquals(len(proyecto), 1)
proyecto = proyecto[0]
miembro = Miembro.objects.filter(
usuario=proyecto.scrum_master,
proyecto=proyecto,
rol=RolProyecto.objects.get(nombre="Scrum Master", proyecto=proyecto)
)
self.assertEquals(len(miembro), 1)
body = response.json()
self.assertEquals(body["nombre"], proyecto.nombre)
self.assertEquals(body["fecha_inicio"], str(proyecto.fecha_inicio))
self.assertEquals(body["fecha_fin"], str(proyecto.fecha_fin))
self.assertEquals(body["scrum_master"]["id"], proyecto.scrum_master.id)
self.assertEquals(proyecto.estado, "P")
def test_crear_proyecto_pasando_un_estado(self):
"""
test_crear_proyecto_pasando_un_estado
Prueba crear un proyecto pasando un estado que difiere de su estado inicial Pendiente
"""
print("\nProbando crear un proyecto asignando un estado diferente a Pendiente")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 2,
"estado": "A"
}
response = self.client.post("/api/proyectos/", proyecto_body)
self.assertEquals(response.status_code, 200)
proyecto = Proyecto.objects.filter(
nombre=proyecto_body["nombre"],
fecha_inicio=proyecto_body["fecha_inicio"],
fecha_fin=proyecto_body["fecha_fin"],
scrum_master=Usuario.objects.get(pk=proyecto_body["scrum_master_id"]),
)
self.assertEquals(len(proyecto), 1)
proyecto = proyecto[0]
self.assertEquals(proyecto.estado, "P")
def test_crear_proyecto_sin_permiso_crear_proyectos(self):
"""
test_crear_proyecto_sin_permiso_crear_proyectos Prueba crear un proyecto sin tener permiso de crear proyectos
"""
print("\nProbando crear proyecto sin permiso crear proyectos")
self.client.login(username="testing", password="polijira2021")
Permiso.objects.get(pk=11).delete()
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.post("/api/proyectos/", proyecto_body)
self.assertEquals(response.status_code, 403)
body = response.json()
self.assertEquals(body["permission_required"], ["crear_proyectos", "ver_usuarios"])
self.assertEquals(body["error"], "forbidden")
def test_crear_proyecto_sin_permiso_ver_usuarios(self):
"""
test_crear_proyecto_sin_permiso_crear_proyectos Prueba crear un proyecto sin tener permiso de ver usuarios
"""
print("\nProbando crear proyecto sin permiso ver usuarios")
self.client.login(username="testing", password="polijira2021")
Permiso.objects.get(pk=1).delete()
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.post("/api/proyectos/", proyecto_body)
self.assertEquals(response.status_code, 403)
body = response.json()
self.assertEquals(body["permission_required"], ["crear_proyectos", "ver_usuarios"])
self.assertEquals(body["error"], "forbidden")
def test_crear_proyecto_con_sm_no_existente(self):
"""
test_crear_proyecto_con_usuario_no_existente Prueba crear un proyecto con un Scrum Master que no existe
"""
print("\nProbando crear proyecto con un scrum master que no existe")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 55
}
response = self.client.post("/api/proyectos/", proyecto_body)
self.assertEquals(response.status_code, 404)
body = response.json()
self.assertEquals(body["error"], "not_found")
def test_crear_proyecto_con_nombre_existente(self):
"""
test_crear_proyecto_con_nombre_existente Prueba crear un proyecto con nombre ya existente
"""
print("\nProbando crear un proyecto con nombre existente.")
self.client.login(username="testing", password="polijira2021")
Proyecto.objects.create(nombre="Proyecto A", fecha_inicio=datetime.date.today(),
fecha_fin=datetime.date.today() + datetime.timedelta(100), scrum_master_id=1)
proyecto_body = {
"nombre": "Proyecto A",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 1
}
response = self.client.post("/api/proyectos/", proyecto_body, content_type="application/json")
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(body["message"], "Error de validación")
self.assertEquals(body["errors"]["nombre"], ["Ya existe un proyecto con ese nombre"])
def test_crear_proyecto_con_campo_nombre_superando_max_length(self):
"""
test_crear_proyecto_con_campo_nombre_superando_max_length
Prueba crear un proyecto con un nombre que supera el limite máximo de caracteres
"""
print("\nProbando crear un proyecto con un nombre que supere el max_length=255")
self.client.login(username="testing", password="polijira2021")
nombre_proyecto = ""
for i in range(0, 256):
nombre_proyecto += "a"
proyecto_body = {
"nombre": nombre_proyecto,
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.post("/api/proyectos/", proyecto_body)
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(len(body["errors"]), 1)
self.assertEquals(body["errors"]["nombre"], ["Sobrepasó el limite de caracteres"])
def test_crear_proyecto_con_campo_nombre_no_asignado(self):
"""
test_crear_proyecto_con_campo_nombre_no_asignado Prueba crear un proyecto sin especificar el campo nombre
"""
print("\nProbando crear un proyecto sin asignar el campo nombre")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 3
}
response = self.client.post("/api/proyectos/", proyecto_body)
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(len(body["errors"]), 1)
self.assertEquals(body["errors"]["nombre"], ["No se especificó ningun nombre"])
def test_crear_proyecto_con_campo_fecha_inicio_no_asignado(self):
"""
test_crear_proyecto_con_campo_fecha_inicio_no_asignado
Prueba crear un proyecto sin especificar el campo fecha de inicio
"""
print("\nProbando crear un proyecto sin especificar el campo fecha de inicio")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 3
}
response = self.client.post("/api/proyectos/", proyecto_body)
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(len(body["errors"]), 1)
self.assertEquals(body["errors"]["fecha_inicio"], ["No se especificó ninguna fecha de inicio"])
def test_crear_proyecto_con_campo_fecha_fin_no_asignado(self):
"""
test_crear_proyecto_con_campo_fecha_inicio_no_asignado
Prueba crear un proyecto sin especificar el campo fecha de fin
"""
print("\nProbando crear un proyecto sin especificar el campo fecha de fin")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": datetime.date.today(),
"scrum_master_id": 3
}
response = self.client.post("/api/proyectos/", proyecto_body)
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(len(body["errors"]), 1)
self.assertEquals(body["errors"]["fecha_fin"], ["No se especificó ninguna fecha de fin"])
def test_crear_proyecto_con_campo_scrum_master_id_sin_especificar(self):
"""
test_crear_proyecto_con_campo_scrum_master_id_sin_especificar
Prueba crear un proyecto sin especificar al Scrum Master
"""
print("\nProbando crear un proyecto sin especificar al Scrum Master")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
}
response = self.client.post("/api/proyectos/", proyecto_body)
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(len(body["errors"]), 1)
self.assertEquals(body["errors"]["scrum_master_id"], ["No se especificó el Scrum Master"])
def test_modificar_proyecto(self):
"""
test_modificar_proyecto Prueba modificar un proyecto
"""
print("\nProbando modificar un proyecto")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestModificar",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 1
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 200)
proyecto = Proyecto.objects.filter(
nombre=proyecto_body["nombre"],
fecha_inicio=proyecto_body["fecha_inicio"],
fecha_fin=proyecto_body["fecha_fin"],
scrum_master=Usuario.objects.get(pk=proyecto_body["scrum_master_id"])
)
self.assertEquals(len(proyecto), 1)
proyecto = proyecto[0]
miembro = Miembro.objects.filter(
usuario=proyecto.scrum_master,
proyecto=proyecto,
rol=RolProyecto.objects.get(nombre="Scrum Master")
)
self.assertEquals(len(miembro), 1)
body = response.json()
self.assertEquals(proyecto.estado, "P")
self.assertEquals(body["nombre"], proyecto.nombre)
self.assertEquals(body["fecha_inicio"], str(proyecto.fecha_inicio))
self.assertEquals(body["fecha_fin"], str(proyecto.fecha_fin))
self.assertEquals(body["scrum_master"]["id"], proyecto.scrum_master.id)
# TODO self.asserEquals(body["scrum_master"]["rol"]["nombre"], "Scrum Master")
def test_modificar_proyecto_con_estado_activo(self):
"""
test_modificar_proyecto_con_estado_activo Prueba modificar un proyecto en estado Activado
"""
print("\nProbando modificar un proyecto en estado activado")
self.client.login(username="testing", password="polijira2021")
proyecto = Proyecto.objects.get(pk=1)
proyecto.estado = "A"
proyecto.save()
proyecto_body = {
"nombre": "ProyectoTestModificar",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 1
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
body = response.json()
self.assertEquals(response.status_code, 400)
self.assertEquals(body["error"], "bad_request")
def test_modificar_proyecto_con_estado_finalizado(self):
"""
test_modificar_proyecto_con_estado_finalizado Prueba modificar un proyecto en estado Finalizado
"""
print("\nProbando modificar un proyecto en estado Finalizado")
self.client.login(username="testing", password="polijira2021")
proyecto = Proyecto.objects.get(pk=1)
proyecto.estado = "F"
proyecto.save()
proyecto_body = {
"nombre": "ProyectoTestModificar",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 1
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
body = response.json()
self.assertEquals(response.status_code, 400)
self.assertEquals(body["error"], "bad_request")
def test_modificar_proyecto_con_estado_cancelado(self):
"""
test_modificar_proyecto_con_estado_cancelado Prueba modificar un proyecto en estado Cancelado
"""
print("\nProbando modificar un proyecto en estado Cancelado")
self.client.login(username="testing", password="polijira2021")
proyecto = Proyecto.objects.get(pk=1)
proyecto.estado = "C"
proyecto.save()
proyecto_body = {
"nombre": "ProyectoTestModificar",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 1
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
body = response.json()
self.assertEquals(response.status_code, 400)
self.assertEquals(body["error"], "bad_request")
def test_modificar_proyecto_sin_permiso_modificar_proyectos(self):
"""
test_modificar_proyecto_sin_permiso_crear_proyectos
Prueba modificar un proyecto sin tener permiso modificar proyectos
"""
print("\nProbando modificar proyecto sin permiso crear proyectos")
self.client.login(username="testing", password="polijira2021")
Permiso.objects.get(pk=12).delete()
proyecto_body = {
"nombre": "ProyectoTestModificar",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 403)
body = response.json()
self.assertEquals(body["permission_required"], ["modificar_proyectos", "ver_proyectos", "ver_usuarios"])
self.assertEquals(body["error"], "forbidden")
def test_modificar_proyecto_sin_permiso_ver_proyectos(self):
"""
test_modificar_proyecto_sin_permiso_ver_proyectos
Prueba modificar un proyecto sin tener permiso ver proyectos
"""
print("\nProbando modificar un proyecto sin permiso ver proyectos")
self.client.login(username="testing", password="polijira2021")
Permiso.objects.get(pk=10).delete()
proyecto_body = {
"nombre": "ProyectoTestModificar",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 403)
body = response.json()
self.assertEquals(body["permission_required"], ["modificar_proyectos", "ver_proyectos", "ver_usuarios"])
self.assertEquals(body["error"], "forbidden")
def test_modificar_proyecto_sin_permiso_ver_usuarios(self):
"""
test_modificar_proyecto_sin_permiso_ver_proyectos
Prueba modificar un proyecto sin tener permiso ver usuarios
"""
print("\nProbando modificar un proyecto sin permiso ver proyectos")
self.client.login(username="testing", password="polijira2021")
Permiso.objects.get(pk=1).delete()
proyecto_body = {
"nombre": "ProyectoTestModificar",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 403)
body = response.json()
self.assertEquals(body["permission_required"], ["modificar_proyectos", "ver_proyectos", "ver_usuarios"])
self.assertEquals(body["error"], "forbidden")
def test_modificar_proyecto_con_nombre_existente(self):
"""
test_modificar_proyecto_con_nombre_existente Prueba modificar un proyecto con nombre ya existente
"""
print("\nProbando modificar un proyecto con nombre existente.")
self.client.login(username="testing", password="polijira2021")
Proyecto.objects.create(nombre="Proyecto A", fecha_inicio=datetime.date.today(),
fecha_fin=datetime.date.today() + datetime.timedelta(100), scrum_master_id=1)
proyecto_body = {
"nombre": "Proyecto A",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 1
}
response = self.client.put("/api/proyectos/1/", proyecto_body, content_type="application/json")
self.assertEquals(response.status_code, 403)
body = response.json()
self.assertEquals(body["message"], "Ya existe un proyecto con ese nombre")
self.assertEquals(body["error"], "forbidden")
def test_modificar_proyecto_no_existente(self):
"""
test_modificar_proyecto_no_existente Prueba modificar un proyecto que no existe
"""
print("\nProbando modificar un proyecto que no existe")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestModificar",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.put("/api/proyectos/1000/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 404)
body = response.json()
self.assertEquals(body["error"], "not_found")
def test_modificar_proyecto_con_sm_no_existente(self):
"""
test_modificar_proyecto_con_usuario_no_existente
Prueba modificar un proyecto con un Scrum Master que no existe
"""
print("\nProbando modificar proyecto con un scrum master que no existe")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 55
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 404)
body = response.json()
self.assertEquals(body["error"], "not_found")
def test_modificar_proyecto_con_campo_nombre_superando_max_length(self):
"""
test_modificar_proyecto_con_campo_nombre_superando_max_length
Prueba modificar un proyecto con un nombre que supera el limite máximo de caracteres
"""
print("\nProbando modificar un proyecto con un nombre que supere el max_length=255")
self.client.login(username="testing", password="polijira2021")
nombre_proyecto = ""
for i in range(0, 256):
nombre_proyecto += "a"
proyecto_body = {
"nombre": nombre_proyecto,
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(len(body["errors"]), 1)
self.assertEquals(body["errors"]["nombre"], ["Sobrepasó el limite de caracteres"])
def test_modificar_proyecto_con_campo_nombre_no_asignado(self):
"""
test_modificar_proyecto_con_campo_nombre_no_asignado
Prueba modificar un proyecto sin especificar el campo nombre
"""
print("\nProbando modificar un proyecto sin asignar el campo nombre")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 3
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(len(body["errors"]), 1)
self.assertEquals(body["errors"]["nombre"], ["No se especificó ningun nombre"])
def test_modificar_proyecto_con_campo_fecha_inicio_no_asignado(self):
"""
test_modificar_proyecto_con_campo_fecha_inicio_no_asignado
Prueba modificar un proyecto sin especificar el campo fecha de inicio
"""
print("\nProbando modificar un proyecto sin especificar el campo fecha de inicio")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 3
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(len(body["errors"]), 1)
self.assertEquals(body["errors"]["fecha_inicio"], ["No se especificó ninguna fecha de inicio"])
def test_modificar_proyecto_con_campo_fecha_fin_no_asignado(self):
"""
test_modificar_proyecto_con_campo_fecha_inicio_no_asignado
Prueba modificar un proyecto sin especificar el campo fecha de fin
"""
print("\nProbando modificar un proyecto sin especificar el campo fecha de fin")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": datetime.date.today(),
"scrum_master_id": 3
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(len(body["errors"]), 1)
self.assertEquals(body["errors"]["fecha_fin"], ["No se especificó ninguna fecha de fin"])
def test_modificar_proyecto_con_campo_scrum_master_id_sin_especificar(self):
"""
test_modificar_proyecto_con_campo_scrum_master_id_sin_especificar
Prueba modificar un proyecto sin especificar al Scrum Master
"""
print("\nProbando modificar un proyecto sin especificar al Scrum Master")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(len(body["errors"]), 1)
self.assertEquals(body["errors"]["scrum_master_id"], ["No se especificó el Scrum Master"])
def test_eliminar_proyecto(self):
"""
test_eliminar_proyecto Prueba eliminar un proyecto
"""
print("\nProbando eliminar un proyecto")
self.client.login(username="testing", password="polijira2021")
response = self.client.delete("/api/proyectos/1/")
self.assertEquals(response.status_code, 200)
body = response.json()
self.assertEquals(body["message"], "Proyecto Eliminado")
def test_eliminar_proyecto_sin_permiso_eliminar_proyecto(self):
"""
test_eliminar_proyecto_sin_permiso_eliminar_proyecto
Prueba eliminar un proyecto sin tener permiso eliminar proyecto
"""
print("\nProbando eliminar proyecto sin permiso eliminar proyecto")
self.client.login(username="testing", password="polijira2021")
Permiso.objects.get(pk=13).delete()
response = self.client.delete("/api/proyectos/1/")
self.assertEquals(response.status_code, 403)
body = response.json()
self.assertEquals(body["permission_required"], ["ver_proyectos", "eliminar_proyectos"])
self.assertEquals(body["error"], "forbidden")
def test_eliminar_proyecto_sin_permiso_ver_proyecto(self):
"""
test_eliminar_proyecto_sin_permiso_ver_proyecto
Prueba eliminar un proyecto sin tener permiso ver proyecto
"""
print("\nProbando eliminar proyecto sin permiso ver proyecto")
self.client.login(username="testing", password="polijira2021")
Permiso.objects.get(pk=13).delete()
response = self.client.delete("/api/proyectos/1/")
self.assertEquals(response.status_code, 403)
body = response.json()
self.assertEquals(body["permission_required"], ["ver_proyectos", "eliminar_proyectos"])
self.assertEquals(body["error"], "forbidden")
def test_eliminar_proyecto_no_existente(self):
"""
test_eliminar_proyecto_no_existente Prueba eliminar un proyecto que no existe
"""
print("\nProbando eliminar un proyecto que no existe")
self.client.login(username="testing", password="polijira2021")
response = self.client.delete("/api/proyectos/1000/")
self.assertEquals(response.status_code, 404)
body = response.json()
self.assertEquals(body["error"], "not_found")
def test_eliminar_proyecto_con_estado_activo(self):
"""
test_eliminar_proyecto_con_estado_activo Prueba eliminar un proyecto con estado Activado
"""
print("\nProbando eliminar un proyecto con estado Activo")
self.client.login(username="testing", password="polijira2021")
proyecto = Proyecto.objects.get(pk=1)
proyecto.estado = "A"
proyecto.save()
response = self.client.delete("/api/proyectos/1/")
self.assertEquals(response.status_code, 400)
body = response.json()
self.assertEquals(body["error"], "bad_request")
def test_eliminar_proyecto_con_estado_cancelado(self):
"""
test_eliminar_proyecto_con_estado_cancelado Prueba eliminar un proyecto con estado Cancelado
"""
print("\nProbando eliminar un proyecto en estado Cancelado")
self.client.login(username="testing", password="polijira2021")
proyecto = Proyecto.objects.get(pk=1)
proyecto.estado = "C"
proyecto.save()
response = self.client.delete("/api/proyectos/1/")
self.assertEquals(response.status_code, 400)
body = response.json()
self.assertEquals(body["error"], "bad_request")
def test_eliminar_proyecto_con_estado_finalizado(self):
"""
test_eliminar_proyecto_con_estado_finalizado Prueba eliminar un proyecto en estado Finalizado
"""
print("\nProbando eliminar un proyecto en estado Finalizado")
self.client.login(username="testing", password="polijira2021")
proyecto = Proyecto.objects.get(pk=1)
proyecto.estado = "F"
proyecto.save()
response = self.client.delete("/api/proyectos/1/")
self.assertEquals(response.status_code, 400)
body = response.json()
self.assertEquals(body["error"], "bad_request")
def test_crear_proyecto_con_fecha_en_el_pasado(self):
"""
test_crear_proyecto_con_fecha_en_el_pasado Prueba crear un proyecto con una fecha en el pasado
"""
print("\nProbando crear proyecto con una fecha que este en el pasado")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": "1996-04-11",
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.post("/api/proyectos/", proyecto_body)
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(body["errors"]["fecha_inicio"], ["La fecha de inicio no puede estar en el pasado"])
def test_crear_proyecto_con_fecha_fin_menor_a_fecha_inicio(self):
"""
test_crear_proyecto_con_fecha_fin_menor_a_fecha_inicio
Prueba crear un proyecto con una fecha de fin menor a la de inicio
"""
print("\nProbando crear un proyecto con una fecha de fin menor a la de inicio")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() - datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.post("/api/proyectos/", proyecto_body)
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(body["errors"]["fecha_fin"], ["La fecha de fin no puede ser menor a la de inicio"])
def test_modificar_proyecto_con_fecha_en_el_pasado(self):
"""
test_modificar_proyecto_con_fecha_en_el_pasado Prueba modificar un proyecto con una fecha en el pasado
"""
print("\nProbando modificar proyecto con una fecha que este en el pasado")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": "1996-04-11",
"fecha_fin": datetime.date.today() + datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(body["errors"]["fecha_inicio"], ["La fecha de inicio no puede estar en el pasado"])
def test_modificar_proyecto_con_fecha_fin_menor_a_fecha_inicio(self):
"""
test_modificar_proyecto_con_fecha_fin_menor_a_fecha_inicio
Prueba modificar un proyecto con una fecha de fin menor a la de inicio
"""
print("\nProbando modificar un proyecto con una fecha de fin menor a la de inicio")
self.client.login(username="testing", password="polijira2021")
proyecto_body = {
"nombre": "ProyectoTestCrear",
"fecha_inicio": datetime.date.today(),
"fecha_fin": datetime.date.today() - datetime.timedelta(5),
"scrum_master_id": 2
}
response = self.client.put("/api/proyectos/1/", proyecto_body, "application/json")
self.assertEquals(response.status_code, 422)
body = response.json()
self.assertEquals(body["errors"]["fecha_fin"], ["La fecha de fin no puede ser menor a la de inicio"])
def test_activar_proyecto(self):
"""
test_activar_proyecto Prueba activar un proyecto
"""
print("\nProbando activar un proyecto")
self.client.login(username="testing", password="polijira2021")
response = self.client.post("/api/proyectos/1/activar/")
self.assertEquals(response.status_code, 200)
proyecto = Proyecto.objects.get(pk=1)
self.assertEquals(proyecto.estado, "A")
self.assertEquals(proyecto.fecha_inicio, datetime.date.today())
def test_activar_proyecto_sin_permiso(self):
"""
test_activar_proyecto_permiso activar un proyecto no teniendo el permiso activar_proyecto
"""
print("\nProbando activar un proyecto sin ser Scrum Master")
self.client.login(username="testing", password="polijira2021")
rol = Miembro.objects.get(usuario_id=1, proyecto_id=1).rol
rol.eliminar_permiso(PermisoProyecto.objects.get(codigo="activar_proyecto"))
response = self.client.post("/api/proyectos/1/activar/")
self.assertEquals(response.status_code, 403)
body = response.json()
self.assertEquals(body["error"], "forbidden")
def test_activar_proyecto_no_existente(self):
    """
    Verify that activating a project that does not exist returns
    404/not_found.
    """
    print("\nProbando activar un proyecto que no existe")
    self.client.login(username="testing", password="polijira2021")
    response = self.client.post("/api/proyectos/1000/activar/")
    # assertEqual: assertEquals is a deprecated unittest alias.
    self.assertEqual(response.status_code, 404)
    body = response.json()
    self.assertEqual(body["error"], "not_found")
def test_activar_proyecto_cancelado(self):
    """
    Verify that a cancelled project cannot be activated: the endpoint
    returns 400/bad_request and reports the current state.

    (The original docstring carried a copy-pasted wrong test name.)
    """
    print("\nProbando activar un proyecto cancelado")
    self.client.login(username="testing", password="polijira2021")
    # Put the project into the cancelled state first.
    proyecto = Proyecto.objects.get(pk=1)
    proyecto.estado = "C"
    proyecto.save()
    response = self.client.post("/api/proyectos/1/activar/")
    # assertEqual: assertEquals is a deprecated unittest alias.
    self.assertEqual(response.status_code, 400)
    body = response.json()
    self.assertEqual(body["estado"], "Cancelado")
    self.assertEqual(body["error"], "bad_request")
def test_activar_proyecto_finalizado(self):
    """
    Verify that a finished project cannot be activated: the endpoint
    returns 400/bad_request and reports the current state.

    (The original docstring carried a copy-pasted wrong test name.)
    """
    print("\nProbando activar un proyecto finalizado")
    self.client.login(username="testing", password="polijira2021")
    # Put the project into the finished state first.
    proyecto = Proyecto.objects.get(pk=1)
    proyecto.estado = "F"
    proyecto.save()
    response = self.client.post("/api/proyectos/1/activar/")
    # assertEqual: assertEquals is a deprecated unittest alias.
    self.assertEqual(response.status_code, 400)
    body = response.json()
    self.assertEqual(body["estado"], "Finalizado")
    self.assertEqual(body["error"], "bad_request")
def test_obtener_proyecto_sin_ser_miembro_con_permiso_ver_proyectos(self):
    """
    Verify that a user who is not a member of a project can still
    retrieve it when they hold the global 'ver_proyectos' permission.
    """
    print("\nProbando obtener un proyecto sin ser miembro de ese proyecto y teniendo permiso para ver proyectos")
    self.client.login(username="testing", password="polijira2021")
    # Remove the membership so only the global permission grants access.
    Miembro.objects.get(pk=1).delete()
    response = self.client.get("/api/proyectos/1/")
    # assertEqual: assertEquals is a deprecated unittest alias.
    self.assertEqual(response.status_code, 200)
    body = response.json()
    proyecto = Proyecto.objects.get(pk=1)
    self.assertEqual(body['nombre'], proyecto.nombre)
    self.assertEqual(body['fecha_inicio'], str(proyecto.fecha_inicio))
    self.assertEqual(body['fecha_fin'], str(proyecto.fecha_fin))
    self.assertEqual(body['scrum_master']['id'], proyecto.scrum_master.id)
    self.assertEqual(body['estado'], proyecto.estado)
def test_obtener_proyecto_sin_permiso_ver_proyectos_siendo_miembro(self):
    """
    Verify that a member can retrieve their project even after the
    global 'ver_proyectos' permission has been removed.
    """
    print("\nProbando obtener un proyecto sin permiso ver proyectos y siendo miembro")
    self.client.login(username="testing", password="polijira2021")
    # Drop the global permission so access relies on membership alone.
    Permiso.objects.get(codigo="ver_proyectos").delete()
    response = self.client.get("/api/proyectos/1/")
    # assertEqual: assertEquals is a deprecated unittest alias.
    self.assertEqual(response.status_code, 200)
    body = response.json()
    proyecto = Proyecto.objects.get(pk=1)
    self.assertEqual(body['nombre'], proyecto.nombre)
    self.assertEqual(body['fecha_inicio'], str(proyecto.fecha_inicio))
    self.assertEqual(body['fecha_fin'], str(proyecto.fecha_fin))
    self.assertEqual(body['scrum_master']['id'], proyecto.scrum_master.id)
    self.assertEqual(body['estado'], proyecto.estado)
def test_obtener_proyecto_sin_permiso_ver_proyectos_no_siendo_miembro(self):
    """
    Verify that a user who is neither a member nor holds the
    'ver_proyectos' permission is denied with 403.
    """
    print("\nProbando obtener un proyecto sin ser miembro y tener permiso de ver proyectos")
    self.client.login(username="testing", password="polijira2021")
    # Remove both the membership and the global permission.
    Miembro.objects.get(pk=1).delete()
    Permiso.objects.get(codigo="ver_proyectos").delete()
    response = self.client.get("/api/proyectos/1/")
    # assertEqual: assertEquals is a deprecated unittest alias.
    self.assertEqual(response.status_code, 403)
def test_obtener_estimaciones_pendientes(self):
    """
    Fetch the project's pending estimations and check the response
    lists exactly the member's sprint-backlog items in state 'p'.
    """
    print("\nProbando obtener las estimaciones pendientes del Proyecto.")
    self.client.login(username="testing", password="polijira2021")
    # Force one backlog item into the pending-estimation state.
    backlog = SprintBacklog.objects.get(pk=1)
    backlog.estado_estimacion = "p"
    backlog.save()
    proj = Proyecto.objects.get(pk=3)
    member = Miembro.objects.get(usuario=Usuario.objects.get(pk=1), proyecto=proj)
    expected = SprintBacklog.objects.filter(estado_estimacion='p', desarrollador__miembro_proyecto=member)
    response = self.client.get("/api/proyectos/{}/estimaciones_pendientes/".format(proj.id))
    payload = response.json()
    self.assertEqual(response.status_code, 200)
    self.assertEqual(len(payload), len(expected))
def test_obtener_estimaciones_pendientes_sin_ser_miembro(self):
    """
    Requesting pending estimations as a non-member must be denied
    with 403/forbidden.
    """
    print("\nProbando obtener las estimaciones pendientes del Proyecto sin ser miembro.")
    self.client.login(username="user_test", password="polijira2021")
    # A pending item exists, but the caller is not a project member.
    backlog = SprintBacklog.objects.get(pk=1)
    backlog.estado_estimacion = "p"
    backlog.save()
    proj = Proyecto.objects.get(pk=3)
    response = self.client.get("/api/proyectos/{}/estimaciones_pendientes/".format(proj.id))
    payload = response.json()
    self.assertEqual(response.status_code, 403)
    self.assertEqual(payload["message"], "Usted no es miembro de este Proyecto")
    self.assertEqual(payload["error"], "forbidden")
def test_obtener_estimaciones_pendientes_proyecto_inexistente(self):
    """
    Requesting pending estimations for a nonexistent project must
    return 404/not_found.
    """
    print("\nProbando obtener las estimaciones pendientes de Proyecto inexistente.")
    self.client.login(username="testing", password="polijira2021")
    # A pending item exists, but the requested project id does not.
    backlog = SprintBacklog.objects.get(pk=1)
    backlog.estado_estimacion = "p"
    backlog.save()
    response = self.client.get("/api/proyectos/99/estimaciones_pendientes/")
    payload = response.json()
    self.assertEqual(response.status_code, 404)
    self.assertEqual(payload["message"], "No existe el Proyecto")
    self.assertEqual(payload["error"], "not_found")
def test_finalizar_proyecto(self):
    """
    Finalize a project whose sprints are finished and whose user
    stories are released; expect 200 and state 'F' persisted.
    """
    print("\nProbando finalizar un proyecto.")
    self.client.login(username="testing", password="polijira2021")
    proj = Proyecto.objects.get(pk=1)
    proj.iniciar()
    # Release every user story and finish every sprint so the project
    # satisfies the preconditions for finalization.
    for us in UserStory.objects.filter(proyecto=proj):
        us.lanzar()
    for sp in Sprint.objects.filter(proyecto=proj):
        sp.finalizar()
    response = self.client.post("/api/proyectos/{}/finalizar/".format(proj.id))
    payload = response.json()
    self.assertEqual(response.status_code, 200)
    self.assertEqual(payload["estado"], "F")
    self.assertEqual(Proyecto.objects.get(pk=1).estado, "F")
def test_finalizar_proyecto_sin_permiso_finalizar_proyecto(self):
    """
    Finalizing without the 'finalizar_proyecto' permission must be
    denied with 403/forbidden even when preconditions are met.
    """
    print("\nProbando finalizar un proyecto sin permiso.")
    self.client.login(username="testing", password="polijira2021")
    # Remove the permission, then line up an otherwise-valid project.
    PermisoProyecto.objects.get(codigo="finalizar_proyecto").delete()
    proj = Proyecto.objects.get(pk=1)
    proj.iniciar()
    for us in UserStory.objects.filter(proyecto=proj):
        us.lanzar()
    for sp in Sprint.objects.filter(proyecto=proj):
        sp.finalizar()
    response = self.client.post("/api/proyectos/{}/finalizar/".format(proj.id))
    payload = response.json()
    self.assertEqual(response.status_code, 403)
    self.assertEqual(payload["message"], "No tiene permiso para realizar esta accion")
    self.assertEqual(payload["error"], "forbidden")
def test_finalizar_proyecto_no_iniciado(self):
    """
    A project that was never started cannot be finalized: expect
    400/bad_request.
    """
    print("\nProbando finalizar un proyecto sin iniciar.")
    self.client.login(username="testing", password="polijira2021")
    # Note: the project is deliberately NOT started here.
    proj = Proyecto.objects.get(pk=1)
    for us in UserStory.objects.filter(proyecto=proj):
        us.lanzar()
    for sp in Sprint.objects.filter(proyecto=proj):
        sp.finalizar()
    response = self.client.post("/api/proyectos/{}/finalizar/".format(proj.id))
    payload = response.json()
    self.assertEqual(response.status_code, 400)
    self.assertEqual(payload["message"], "No puedes finalizar el Proyecto en su estado actual")
    self.assertEqual(payload["error"], "bad_request")
def test_finalizar_proyecto_sin_ser_miembro(self):
    """
    Finalizing a project as a non-member must be denied with
    403/forbidden even when preconditions are met.
    """
    print("\nProbando finalizar un proyecto sin ser miembro.")
    self.client.login(username="user_test", password="polijira2021")
    # Line up an otherwise-valid project; the caller is not a member.
    proj = Proyecto.objects.get(pk=1)
    proj.iniciar()
    for us in UserStory.objects.filter(proyecto=proj):
        us.lanzar()
    for sp in Sprint.objects.filter(proyecto=proj):
        sp.finalizar()
    response = self.client.post("/api/proyectos/{}/finalizar/".format(proj.id))
    payload = response.json()
    self.assertEqual(response.status_code, 403)
    self.assertEqual(payload["message"], "Usted no es miembro de este Proyecto")
    self.assertEqual(payload["error"], "forbidden")
def test_finalizar_proyecto_inexistente(self):
    """
    Finalizing a nonexistent project must return 404/not_found.
    """
    print("\nProbando finalizar un proyecto inexistente.")
    self.client.login(username="testing", password="polijira2021")
    response = self.client.post("/api/proyectos/99/finalizar/")
    payload = response.json()
    self.assertEqual(response.status_code, 404)
    self.assertEqual(payload["message"], "El proyecto no existe")
    self.assertEqual(payload["error"], "not_found")
def test_finalizar_proyecto_con_sprints_activos(self):
    """
    A project with active sprints cannot be finalized: expect
    400/bad_request with an explanatory message.
    """
    print("\nProbando finalizar un proyecto con sprints activos.")
    self.client.login(username="testing", password="polijira2021")
    proj = Proyecto.objects.get(pk=1)
    proj.iniciar()
    for us in UserStory.objects.filter(proyecto=proj):
        us.lanzar()
    # Force every sprint into the active state to hit the guard.
    for sp in Sprint.objects.filter(proyecto=proj):
        sp.estado = "A"
        sp.save()
    response = self.client.post("/api/proyectos/{}/finalizar/".format(proj.id))
    payload = response.json()
    self.assertEqual(response.status_code, 400)
    self.assertEqual(
        payload["message"], "No puedes finalizar el Proyecto hasta que todos los Sprints esten finalizados")
    self.assertEqual(payload["error"], "bad_request")
def test_finalizar_proyecto_con_user_stories_pendientes(self):
    """
    A project with pending user stories cannot be finalized: expect
    400/bad_request with an explanatory message.
    """
    print("\nProbando finalizar un proyecto con user stories pendientes.")
    self.client.login(username="testing", password="polijira2021")
    proj = Proyecto.objects.get(pk=1)
    proj.iniciar()
    # Force every user story into the pending state to hit the guard.
    for us in UserStory.objects.filter(proyecto=proj):
        us.estado = "P"
        us.save()
    for sp in Sprint.objects.filter(proyecto=proj):
        sp.finalizar()
    response = self.client.post("/api/proyectos/{}/finalizar/".format(proj.id))
    payload = response.json()
    self.assertEqual(response.status_code, 400)
    self.assertEqual(
        payload["message"],
        "No puedes finalizar el Proyecto hasta que todos los User Stories esten lanzados o cancelados"
    )
    self.assertEqual(payload["error"], "bad_request")
def test_cancelar_proyecto(self):
    """
    Cancel an active project: expect 200, state 'C' persisted and no
    sprint left in the active state.
    """
    print("\nProbando cancelar un proyecto.")
    self.client.login(username="testing", password="polijira2021")
    proj = Proyecto.objects.get(pk=1)
    proj.iniciar()
    response = self.client.post("/api/proyectos/{}/cancelar/".format(proj.id))
    # After cancellation no sprint should remain active.
    still_active = any(sp.estado == "A" for sp in Sprint.objects.filter(proyecto=proj))
    payload = response.json()
    self.assertEqual(response.status_code, 200)
    self.assertEqual(payload["estado"], "C")
    self.assertEqual(still_active, False)
    self.assertEqual(Proyecto.objects.get(pk=1).estado, "C")
def test_cancelar_proyecto_sin_ser_miembro(self):
    """
    Cancelling a project as a non-member must be denied with
    403/forbidden.
    """
    print("\nProbando cancelar un proyecto sin ser miembro.")
    self.client.login(username="user_test", password="polijira2021")
    proj = Proyecto.objects.get(pk=1)
    proj.iniciar()
    response = self.client.post("/api/proyectos/{}/cancelar/".format(proj.id))
    payload = response.json()
    self.assertEqual(response.status_code, 403)
    self.assertEqual(payload["message"], "Usted no es miembro de este Proyecto")
    self.assertEqual(payload["error"], "forbidden")
def test_cancelar_proyecto_inexistente(self):
    """
    Cancelling a nonexistent project must return 404/not_found.
    """
    print("\nProbando cancelar un proyecto inexistente.")
    self.client.login(username="testing", password="polijira2021")
    response = self.client.post("/api/proyectos/99/cancelar/")
    payload = response.json()
    self.assertEqual(response.status_code, 404)
    self.assertEqual(payload["message"], "El proyecto no existe")
    self.assertEqual(payload["error"], "not_found")
def test_cancelar_proyecto_sin_permiso_cancelar_proyecto(self):
    """
    Cancelling without the 'cancelar_proyecto' permission must be
    denied with 403/forbidden.
    """
    print("\nProbando cancelar un proyecto sin permisos.")
    self.client.login(username="testing", password="polijira2021")
    # Remove the permission before issuing the request.
    PermisoProyecto.objects.get(codigo="cancelar_proyecto").delete()
    proj = Proyecto.objects.get(pk=1)
    proj.iniciar()
    response = self.client.post("/api/proyectos/{}/cancelar/".format(proj.id))
    payload = response.json()
    self.assertEqual(response.status_code, 403)
    self.assertEqual(payload["message"], "No tiene permiso para realizar esta acción")
    self.assertEqual(payload["error"], "forbidden")
def test_cancelar_proyecto_no_activo(self):
    """
    A project that is not active cannot be cancelled: expect
    400/bad_request.
    """
    print("\nProbando cancelar un proyecto no activo.")
    self.client.login(username="testing", password="polijira2021")
    # Note: the project is deliberately NOT started here.
    proj = Proyecto.objects.get(pk=1)
    response = self.client.post("/api/proyectos/{}/cancelar/".format(proj.id))
    payload = response.json()
    self.assertEqual(response.status_code, 400)
    self.assertEqual(payload["message"], "No puedes cancelar el Proyecto en su estado actual")
    self.assertEqual(payload["error"], "bad_request")
| 47.544218
| 117
| 0.659733
| 6,220
| 55,912
| 5.747749
| 0.037942
| 0.064446
| 0.040838
| 0.041174
| 0.941121
| 0.898129
| 0.831865
| 0.782971
| 0.757014
| 0.724931
| 0
| 0.015284
| 0.226517
| 55,912
| 1,175
| 118
| 47.584681
| 0.811386
| 0.115682
| 0
| 0.721649
| 0
| 0
| 0.245755
| 0.024347
| 0
| 0
| 0
| 0.002553
| 0.216495
| 1
| 0.074456
| false
| 0.07331
| 0.005727
| 0
| 0.082474
| 0.12142
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
86566fb95b84a16a481c83fb33c46c90d16c018d
| 8,315
|
py
|
Python
|
py/2015/2A.py
|
pedrotari7/advent_of_code
|
98d5bc8d903435624a019a5702f5421d7b4ef8c8
|
[
"MIT"
] | null | null | null |
py/2015/2A.py
|
pedrotari7/advent_of_code
|
98d5bc8d903435624a019a5702f5421d7b4ef8c8
|
[
"MIT"
] | null | null | null |
py/2015/2A.py
|
pedrotari7/advent_of_code
|
98d5bc8d903435624a019a5702f5421d7b4ef8c8
|
[
"MIT"
] | null | null | null |
presents = """20x3x11 15x27x5 6x29x7 30x15x9 19x29x21 10x4x15 1x26x4 1x5x18 10x15x23 10x14x20 3x5x18 29x23x30 7x4x10 22x24x29 30x1x2 19x2x5 11x9x22 23x15x10 11x11x10 30x28x5 22x5x4 6x26x20 16x12x30 10x20x5 25x14x24 16x17x22 11x28x26 1x11x10 1x24x15 13x17x21 30x3x13 20x25x17 22x12x5 22x20x24 9x2x14 6x18x8 27x28x24 11x17x1 1x4x12 5x20x13 24x23x23 22x1x25 18x19x5 5x23x13 8x16x4 20x21x9 1x7x11 8x30x17 3x30x9 6x16x18 22x25x27 9x20x26 16x21x23 5x24x17 15x17x15 26x15x10 22x16x3 20x24x24 8x18x10 23x19x16 1x21x24 23x23x9 14x20x6 25x5x5 16x3x1 29x29x20 11x4x26 10x23x24 29x25x16 27x27x22 9x7x22 6x21x18 25x11x19 14x13x3 15x28x17 14x3x12 29x8x19 30x14x20 20x23x4 8x16x5 4x11x18 20x8x24 21x13x21 14x26x29 27x4x17 27x4x25 5x28x6 23x24x11 29x22x5 30x20x6 23x2x10 11x4x7 27x23x6 10x20x19 8x20x22 5x29x22 16x13x2 2x11x14 6x12x4 3x13x6 16x5x18 25x3x28 21x1x5 20x16x19 28x30x27 26x7x18 25x27x24 11x19x7 21x19x17 2x12x27 20x5x14 8x5x8 6x24x8 7x28x20 3x20x28 5x20x30 13x29x1 26x29x5 19x28x25 5x19x11 11x20x22 4x23x1 19x25x12 3x10x6 3x14x10 28x16x12 23x12x2 23x12x19 20x28x10 9x10x25 16x21x16 1x18x20 9x4x26 3x25x8 17x16x28 9x28x16 27x3x12 17x24x12 13x21x10 7x17x13 6x10x9 7x29x25 11x19x30 1x24x5 20x16x23 24x28x21 6x29x19 25x2x19 12x5x26 25x29x12 16x28x22 26x26x15 9x13x5 10x29x7 1x24x16 22x2x2 6x16x13 3x12x28 4x12x13 14x27x21 14x23x26 7x5x18 8x30x27 15x9x18 26x16x5 3x29x17 19x7x18 16x18x1 26x15x30 24x30x21 13x20x7 4x12x10 27x20x11 28x29x21 20x14x30 28x12x3 19x1x8 4x8x6 21x14x2 27x19x21 17x24x14 15x18x11 18x7x26 25x28x29 27x26x9 18x12x17 24x28x25 13x24x14 26x9x28 9x3x30 9x2x9 8x1x29 18x30x10 18x14x5 26x8x30 12x1x1 30x5x28 26x17x21 10x10x10 20x7x27 13x17x6 21x13x17 2x16x8 7x9x9 15x26x4 11x28x25 10x6x19 21x6x29 15x5x6 28x9x16 14x3x10 12x29x5 22x19x19 25x15x22 30x6x28 11x23x13 20x25x14 26x1x13 6x14x15 16x25x17 28x4x13 10x24x25 4x13x10 9x15x16 15x24x6 22x9x19 11x11x8 4x19x12 24x5x4 27x12x13 7x27x16 2x6x9 29x27x15 18x26x23 19x16x15 14x5x25 9x16x30 4x6x4 13x10x10 1x8x29 23x5x17 19x20x20 11x27x24 27x15x5 
15x11x12 21x11x3 1x13x22 17x8x8 13x14x14 17x22x7 9x5x8 2x6x3 25x9x15 11x8x13 9x25x12 3x16x12 12x16x8 16x24x17 4x6x26 22x29x11 14x17x19 28x2x27 24x22x19 22x20x30 23x28x4 16x12x14 22x24x22 29x1x28 26x29x16 3x25x30 27x3x13 22x24x26 25x3x2 7x24x2 10x5x3 28x8x29 25x6x4 12x17x14 24x3x5 23x27x7 26x23x30 11x10x19 23x7x11 26x14x15 14x3x25 12x24x14 2x14x12 9x12x16 9x2x28 3x8x2 22x6x9 2x30x2 25x1x9 20x11x2 14x11x12 7x14x12 24x8x26 13x21x23 18x17x23 13x6x17 20x20x19 13x17x29 7x24x24 23x8x6 19x10x28 3x8x21 15x20x18 11x27x1 11x24x28 13x20x11 18x19x22 27x22x12 28x3x2 13x4x29 26x5x6 14x29x25 7x4x7 5x17x7 2x8x1 22x30x24 22x21x28 1x28x13 11x20x4 25x29x19 9x23x4 30x6x11 25x18x10 28x10x24 3x5x20 19x28x10 27x19x2 26x20x4 19x21x6 2x12x30 8x26x27 11x27x10 14x13x17 4x3x21 2x20x21 22x30x3 2x23x2 3x16x12 22x28x22 3x23x29 8x25x15 9x30x4 10x11x1 24x8x20 10x7x27 7x22x4 27x13x17 5x28x5 30x15x13 10x8x17 8x21x5 8x17x26 25x16x4 9x7x25 13x11x20 6x30x9 15x14x12 30x1x23 5x20x24 22x7x6 26x11x23 29x7x5 13x24x28 22x20x10 18x3x1 15x19x23 28x28x20 7x26x2 9x12x20 15x4x6 1x17x21 3x22x17 9x4x20 25x19x5 9x11x22 14x1x17 14x5x16 30x5x18 19x6x12 28x16x22 13x4x25 29x23x18 1x27x3 12x14x4 10x25x19 15x19x30 11x30x4 11x22x26 13x25x2 17x13x27 11x30x24 15x1x14 17x18x4 26x11x3 16x22x28 13x20x9 1x18x3 25x11x12 20x21x1 22x27x4 8x28x23 7x13x27 17x9x26 27x27x20 11x20x12 26x21x11 29x14x12 27x25x1 28x29x25 21x23x28 5x18x18 19x5x4 7x6x30 27x8x11 12x24x12 16x25x22 26x11x29 25x22x17 15x23x23 17x9x6 30x10x16 21x3x5 18x27x2 28x21x14 16x18x17 4x18x2 9x1x14 9x1x9 5x27x12 8x16x30 3x19x19 16x26x24 1x6x9 15x14x3 11x7x19 8x19x3 17x26x26 6x18x11 19x12x4 29x20x16 20x17x23 6x6x5 20x30x19 18x25x18 2x26x2 3x1x1 14x25x18 3x1x6 11x14x18 17x23x27 25x29x9 6x25x20 20x10x9 17x5x18 29x14x8 14x25x26 10x15x29 23x19x11 22x2x2 4x5x5 13x23x25 19x13x19 20x18x6 30x7x28 26x18x17 29x18x10 30x29x1 12x26x24 18x17x26 29x28x15 3x12x20 24x10x8 30x15x6 28x23x15 14x28x11 10x27x19 14x8x21 24x1x23 1x3x27 6x15x6 8x25x26 13x10x25 6x9x8 10x29x29 26x23x5 14x24x1 25x6x22 
17x11x18 1x27x26 18x25x23 20x15x6 2x21x28 2x10x13 12x25x14 2x14x23 30x5x23 29x19x21 29x10x25 14x22x16 17x11x26 12x17x30 8x17x7 20x25x28 20x11x30 15x1x12 13x3x24 16x23x23 27x3x3 26x3x27 18x5x12 12x26x7 19x27x12 20x10x28 30x12x25 3x14x10 21x26x1 24x26x26 7x21x30 3x29x12 29x28x5 5x20x7 27x11x2 15x20x4 16x15x15 19x13x7 7x17x15 27x24x15 9x17x28 20x21x14 14x29x29 23x26x13 27x23x21 18x13x6 26x16x21 18x26x27 9x3x12 30x18x24 12x11x29 5x15x1 1x16x3 14x28x11 2x18x1 19x18x19 18x28x21 2x3x14 22x16x5 28x18x28 24x16x18 7x4x10 19x26x19 24x17x7 25x9x6 25x17x7 20x22x20 3x3x7 23x19x15 21x27x21 1x23x11 9x19x4 22x4x18 6x15x5 15x25x2 23x11x20 27x16x6 27x8x5 10x10x19 22x14x1 7x1x29 8x11x17 27x9x27 28x9x24 17x7x3 26x23x8 7x6x30 25x28x2 1x30x25 3x18x18 28x27x15 14x14x1 10x25x29 18x12x9 20x28x16 26x27x22 8x26x1 21x2x12 25x16x14 21x19x5 12x9x22 16x5x4 5x4x16 25x29x3 4x29x13 15x16x29 8x11x24 30x11x20 17x21x14 12x24x10 10x12x6 3x26x30 15x14x25 20x12x21 13x11x16 15x13x3 5x17x29 6x3x23 9x26x11 30x1x8 14x10x30 18x30x10 13x19x19 16x19x17 28x7x10 28x29x4 3x21x10 4x28x24 7x28x9 2x4x9 25x27x13 6x12x15 4x18x20 20x1x16 5x13x24 11x11x10 12x9x23 1x9x30 17x28x24 9x5x27 21x15x16 17x4x14 8x14x4 13x10x7 17x12x14 9x19x19 2x7x21 8x24x23 19x5x12 11x23x21 13x3x1 5x27x15 12x25x25 13x21x16 9x17x11 1x15x21 4x26x17 11x5x15 23x10x15 12x17x21 27x15x1 4x29x14 5x24x25 10x10x12 18x12x9 11x24x23 24x23x3 28x12x15 29x9x14 11x25x8 5x12x2 26x26x29 9x21x2 8x8x25 1x16x30 17x29x20 9x22x13 7x18x16 3x3x23 26x25x30 15x23x24 20x23x5 20x16x10 23x7x8 20x18x26 8x27x6 30x23x23 7x7x24 21x11x15 1x30x25 26x27x22 30x28x13 20x13x13 3x1x15 16x7x1 7x25x15 12x7x18 16x9x23 16x12x18 29x5x2 17x7x7 21x17x5 9x9x17 26x16x10 29x29x23 17x26x10 5x19x17 1x10x1 14x21x20 13x6x4 13x13x3 23x4x18 4x16x3 16x30x11 2x11x2 15x30x15 20x30x22 18x12x16 23x5x16 6x14x15 9x4x11 30x23x21 20x7x12 7x18x6 15x6x5 18x22x19 16x10x22 26x20x25 9x25x25 29x21x10 9x21x24 7x18x21 14x3x15 18x19x19 4x29x17 14x10x9 2x26x14 13x3x24 4x4x17 6x27x24 2x18x3 14x25x2 30x14x17 11x6x14 4x10x18 
15x4x2 27x7x10 13x24x1 7x12x6 25x22x26 19x2x18 23x29x2 2x15x4 12x6x9 16x14x29 9x17x3 21x9x12 23x18x22 10x8x4 29x2x7 19x27x15 4x24x27 25x20x14 8x23x19 1x24x19 6x20x10 15x8x5 18x28x5 17x23x22 9x16x13 30x24x4 26x3x13 12x22x18 29x17x29 26x4x16 15x7x20 9x15x30 12x7x18 28x19x18 11x23x23 24x20x1 20x3x24 1x26x1 14x10x6 5x27x24 13x21x12 20x20x5 6x28x9 11x26x11 26x29x12 21x4x11 20x11x17 22x27x20 19x11x21 2x11x11 13x5x7 12x10x25 21x28x1 15x30x17 28x19x1 4x19x12 11x4x12 4x10x30 11x18x5 22x20x12 3x7x27 20x26x4 13x27x26 23x14x13 4x19x7 26x27x16 20x5x20 18x5x8 19x21x1 22x8x1 29x4x1 24x10x15 24x9x20 10x3x8 29x30x3 2x8x24 16x7x18 2x11x23 23x15x16 21x12x6 24x28x9 6x1x13 14x29x20 27x24x13 16x26x8 5x6x17 21x8x1 28x19x21 1x14x16 18x2x9 29x28x10 22x26x27 18x26x23 22x24x2 28x26x1 27x29x12 30x13x11 1x25x5 13x30x18 3x13x22 22x10x11 2x7x7 18x17x8 9x22x26 30x18x16 10x2x3 7x27x13 3x20x16 9x21x16 1x18x15 21x30x30 4x25x23 3x11x7 5x6x12 27x1x20 13x15x24 23x29x2 13x5x24 22x16x15 28x14x3 29x24x9 2x20x4 30x10x4 23x7x20 22x12x21 3x19x11 4x28x28 5x4x7 28x12x25 2x16x26 23x20x7 5x21x29 9x21x16 9x6x10 9x6x4 24x14x29 28x11x6 10x22x1 21x30x20 13x17x8 2x25x24 19x21x3 28x8x14 6x29x28 27x10x28 30x11x12 17x2x10 14x19x17 2x11x4 26x1x2 13x4x4 23x20x18 2x17x21 28x7x15 3x3x27 24x17x30 28x28x20 21x5x29 13x12x19 24x29x29 19x10x6 19x12x14 21x4x17 27x16x1 4x17x30 23x23x18 23x15x27 26x2x11 12x8x8 15x23x26 30x17x15 17x17x15 24x4x30 9x9x10 14x25x20 25x11x19 20x7x1 9x21x3 7x19x9 10x6x19 26x12x30 21x9x20 15x11x6 30x21x9 10x18x17 22x9x8 8x30x26 28x12x27 17x17x7 11x13x8 5x3x21 24x1x29 1x28x2 18x28x10 8x29x14 26x26x27 17x10x25 22x30x3 27x9x13 21x21x4 30x29x16 22x7x20 24x10x2 16x29x17 28x15x17 19x19x22 9x8x6 26x23x24 25x4x27 16x12x2 11x6x18 19x14x8 9x29x13 23x30x19 10x16x1 4x21x28 23x25x25 19x9x16 30x11x12 24x3x9 28x19x4 18x12x9 7x1x25 28x7x1 24x3x12 30x24x22 27x24x26 9x30x30 29x10x8 4x6x18 10x1x15 10x4x26 23x20x16 6x3x14 30x8x16 25x14x20 11x9x3 15x23x25 8x30x22 22x19x18 25x1x12 27x25x7 25x23x3 13x20x8 5x30x7 18x19x27 20x23x3 
1x17x21 21x21x27 13x1x24 7x30x20 21x9x18 23x26x6 22x9x29 17x6x21 28x28x29 19x25x26 9x27x21 5x26x8 11x19x1 10x1x18 29x4x8 21x2x22 14x12x8"""
presents = [sorted([int(d) for d in p.split('x')]) for p in presents.split()]
total = sum([p[1]*p[0] + 2*p[1]*p[2] + 2*p[0]*p[1] + 2*p[2]*p[0] for p in presents])
print total
| 1,187.857143
| 8,137
| 0.869513
| 1,043
| 8,315
| 6.931927
| 0.949185
| 0.001107
| 0.00166
| 0.003873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.704461
| 0.123873
| 8,315
| 7
| 8,138
| 1,187.857143
| 0.287989
| 0
| 0
| 0
| 0
| 0.25
| 0.976551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
865e127f8bf53cfcb9401018ad1d7b1f04698c86
| 54,836
|
py
|
Python
|
test-runner/adapters/rest/generated/e2erestapi/operations/_module_operations.py
|
Azure/iot-sdks-e2e-fx
|
b57f71aa3bda26839ba50e26c9cdefb52ea5b8df
|
[
"MIT"
] | 12
|
2019-02-02T00:15:13.000Z
|
2022-02-08T18:20:08.000Z
|
test-runner/adapters/rest/generated/e2erestapi/operations/_module_operations.py
|
Azure/iot-sdks-e2e-fx
|
b57f71aa3bda26839ba50e26c9cdefb52ea5b8df
|
[
"MIT"
] | 36
|
2019-02-14T22:53:17.000Z
|
2022-03-22T22:41:38.000Z
|
test-runner/adapters/rest/generated/e2erestapi/operations/_module_operations.py
|
Azure/iot-sdks-e2e-fx
|
b57f71aa3bda26839ba50e26c9cdefb52ea5b8df
|
[
"MIT"
] | 12
|
2019-02-19T13:28:25.000Z
|
2022-02-08T18:20:55.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrest.exceptions import HttpOperationError
from .. import models
class ModuleOperations(object):
"""ModuleOperations operations.
You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
    """Keep references to the service client, its configuration and the
    model (de)serializers this operation group uses."""
    self.config = config
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
def connect(
        self, transport_type, connection_string, ca_certificate=None, custom_headers=None, raw=False, **operation_config):
    """Connect to the azure IoT Hub as a module.

    :param transport_type: Transport to use. Possible values include:
     'amqp', 'amqpws', 'mqtt', 'mqttws', 'http'
    :type transport_type: str
    :param connection_string: connection string
    :type connection_string: str
    :param ca_certificate:
    :type ca_certificate: ~e2erestapi.models.Certificate
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ConnectResponse or ClientRawResponse if raw=true
    :rtype: ~e2erestapi.models.ConnectResponse or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the transport type into the operation's URL template.
    path_args = {
        'transportType': self._serialize.url("transport_type", transport_type, 'str')
    }
    url = self._client.format_url(self.connect.metadata['url'], **path_args)

    # Query string carries the connection string.
    query_parameters = {
        'connectionString': self._serialize.query("connection_string", connection_string, 'str')
    }

    # JSON in, JSON out; caller headers are merged on top.
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)

    # Optional CA certificate travels in the request body.
    if ca_certificate is not None:
        body_content = self._serialize.body(ca_certificate, 'Certificate')
    else:
        body_content = None

    # Issue the PUT and fail on anything but 200.
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise HttpOperationError(self._deserialize, response)

    # Deserialize the payload into the generated model type.
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ConnectResponse', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
connect.metadata = {'url': '/module/connect/{transportType}'}
def disconnect(
        self, connection_id, custom_headers=None, raw=False, **operation_config):
    """Disconnect the module.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the connection id into the operation's URL template.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    url = self._client.format_url(self.disconnect.metadata['url'], **path_args)

    # No query string; headers are caller-supplied only.
    query_parameters = {}
    header_parameters = {}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the PUT and fail on anything but 200.
    request = self._client.put(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise HttpOperationError(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
disconnect.metadata = {'url': '/module/{connectionId}/disconnect'}
def connect_from_environment(
        self, transport_type, custom_headers=None, raw=False, **operation_config):
    """Connect to the azure IoT Hub as a module using the environment
    variables.

    :param transport_type: Transport to use. Possible values include:
     'amqp', 'amqpws', 'mqtt', 'mqttws', 'http'
    :type transport_type: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ConnectResponse or ClientRawResponse if raw=true
    :rtype: ~e2erestapi.models.ConnectResponse or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the transport type into the operation's URL template.
    path_args = {
        'transportType': self._serialize.url("transport_type", transport_type, 'str')
    }
    url = self._client.format_url(self.connect_from_environment.metadata['url'], **path_args)

    # No query string; we only ask for a JSON response.
    query_parameters = {}
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the PUT and fail on anything but 200.
    request = self._client.put(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise HttpOperationError(self._deserialize, response)

    # Deserialize the payload into the generated model type.
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ConnectResponse', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
connect_from_environment.metadata = {'url': '/module/connectFromEnvironment/{transportType}'}
def create_from_connection_string(
        self, transport_type, connection_string, ca_certificate=None, custom_headers=None, raw=False, **operation_config):
    """Create a module client from a connection string.

    :param transport_type: Transport to use. Possible values include:
     'amqp', 'amqpws', 'mqtt', 'mqttws', 'http'
    :type transport_type: str
    :param connection_string: connection string
    :type connection_string: str
    :param ca_certificate: optional CA certificate to trust
    :type ca_certificate: ~e2erestapi.models.Certificate
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper instead of
     the deserialized body
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ConnectResponse or ClientRawResponse if raw=true
    :rtype: ~e2erestapi.models.ConnectResponse or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'transportType': self._serialize.url("transport_type", transport_type, 'str')
    }
    request_url = self._client.format_url(
        self.create_from_connection_string.metadata['url'], **path_args)

    # The connection string travels in the query; JSON in and out.
    query = {
        'connectionString': self._serialize.query("connection_string", connection_string, 'str')
    }
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # Certificate body is optional.
    body = None if ca_certificate is None else self._serialize.body(ca_certificate, 'Certificate')

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers, body)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)

    result = self._deserialize('ConnectResponse', http_response)
    if raw:
        return ClientRawResponse(result, http_response)
    return result
create_from_connection_string.metadata = {'url': '/module/createFromConnectionstring/{transportType}'}
def create_from_environment(
        self, transport_type, custom_headers=None, raw=False, **operation_config):
    """Create a module client using the EdgeHub environment.

    :param transport_type: Transport to use. Possible values include:
     'amqp', 'amqpws', 'mqtt', 'mqttws', 'http'
    :type transport_type: str
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper instead of
     the deserialized body
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ConnectResponse or ClientRawResponse if raw=true
    :rtype: ~e2erestapi.models.ConnectResponse or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'transportType': self._serialize.url("transport_type", transport_type, 'str')
    }
    request_url = self._client.format_url(
        self.create_from_environment.metadata['url'], **path_args)

    # No query parameters; JSON response expected.
    query = {}
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)

    result = self._deserialize('ConnectResponse', http_response)
    if raw:
        return ClientRawResponse(result, http_response)
    return result
create_from_environment.metadata = {'url': '/module/createFromEnvironment/{transportType}'}
def create_from_x509(
        self, transport_type, x509, custom_headers=None, raw=False, **operation_config):
    """Create a module client from X509 credentials.

    :param transport_type: Transport to use. Possible values include:
     'amqp', 'amqpws', 'mqtt', 'mqttws', 'http'
    :type transport_type: str
    :param x509: X509 credential material
    :type x509: object
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper instead of
     the deserialized body
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ConnectResponse or ClientRawResponse if raw=true
    :rtype: ~e2erestapi.models.ConnectResponse or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'transportType': self._serialize.url("transport_type", transport_type, 'str')
    }
    request_url = self._client.format_url(
        self.create_from_x509.metadata['url'], **path_args)

    # JSON in and out; credentials travel in the body.
    query = {}
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)
    body = self._serialize.body(x509, 'object')

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers, body)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)

    result = self._deserialize('ConnectResponse', http_response)
    if raw:
        return ClientRawResponse(result, http_response)
    return result
create_from_x509.metadata = {'url': '/module/createFromX509/{transportType}'}
def create_from_symmetric_key(
        self, transport_type, device_id, module_id, hostname, symmetric_key, custom_headers=None, raw=False, **operation_config):
    """Create a module client from a symmetric key.

    :param transport_type: Transport to use. Possible values include:
     'amqp', 'amqpws', 'mqtt', 'mqttws', 'http'
    :type transport_type: str
    :param device_id: identity of the device that owns the module
    :type device_id: str
    :param module_id: identity of the module
    :type module_id: str
    :param hostname: name of the host to connect to
    :type hostname: str
    :param symmetric_key: key to use for connection
    :type symmetric_key: str
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper instead of
     the deserialized body
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ConnectResponse or ClientRawResponse if raw=true
    :rtype: ~e2erestapi.models.ConnectResponse or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'transportType': self._serialize.url("transport_type", transport_type, 'str'),
        'deviceId': self._serialize.url("device_id", device_id, 'str'),
        'moduleId': self._serialize.url("module_id", module_id, 'str'),
    }
    request_url = self._client.format_url(
        self.create_from_symmetric_key.metadata['url'], **path_args)

    # Host and key travel in the query string; JSON response expected.
    query = {
        'hostname': self._serialize.query("hostname", hostname, 'str'),
        'symmetricKey': self._serialize.query("symmetric_key", symmetric_key, 'str'),
    }
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)

    result = self._deserialize('ConnectResponse', http_response)
    if raw:
        return ClientRawResponse(result, http_response)
    return result
create_from_symmetric_key.metadata = {'url': '/module/createFromSymmetricKey/{deviceId}/{moduleId}/{transportType}'}
def connect2(
        self, connection_id, custom_headers=None, raw=False, **operation_config):
    """Connect the module.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    request_url = self._client.format_url(self.connect2.metadata['url'], **path_args)

    # No query parameters and no response body to parse.
    query = {}
    headers = {}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)
    if raw:
        return ClientRawResponse(None, http_response)
connect2.metadata = {'url': '/module/{connectionId}/connect2'}
def reconnect(
        self, connection_id, force_renew_password=None, custom_headers=None, raw=False, **operation_config):
    """Reconnect the module.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param force_renew_password: True to force SAS renewal
    :type force_renew_password: bool
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    request_url = self._client.format_url(self.reconnect.metadata['url'], **path_args)

    # Optional flag in the query string; no response body to parse.
    query = {}
    if force_renew_password is not None:
        query['forceRenewPassword'] = self._serialize.query("force_renew_password", force_renew_password, 'bool')
    headers = {}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)
    if raw:
        return ClientRawResponse(None, http_response)
reconnect.metadata = {'url': '/module/{connectionId}/reconnect'}
def disconnect2(
        self, connection_id, custom_headers=None, raw=False, **operation_config):
    """Disconnect the module.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    request_url = self._client.format_url(self.disconnect2.metadata['url'], **path_args)

    # No query parameters and no response body to parse.
    query = {}
    headers = {}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)
    if raw:
        return ClientRawResponse(None, http_response)
disconnect2.metadata = {'url': '/module/{connectionId}/disconnect2'}
def destroy(
        self, connection_id, custom_headers=None, raw=False, **operation_config):
    """Disconnect and destroy the module client.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    request_url = self._client.format_url(self.destroy.metadata['url'], **path_args)

    # No query parameters and no response body to parse.
    query = {}
    headers = {}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)
    if raw:
        return ClientRawResponse(None, http_response)
destroy.metadata = {'url': '/module/{connectionId}/destroy'}
def enable_twin(
        self, connection_id, custom_headers=None, raw=False, **operation_config):
    """Enable module twins.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    request_url = self._client.format_url(self.enable_twin.metadata['url'], **path_args)

    # No query parameters and no response body to parse.
    query = {}
    headers = {}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)
    if raw:
        return ClientRawResponse(None, http_response)
enable_twin.metadata = {'url': '/module/{connectionId}/enableTwin'}
def enable_methods(
        self, connection_id, custom_headers=None, raw=False, **operation_config):
    """Enable methods.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    request_url = self._client.format_url(self.enable_methods.metadata['url'], **path_args)

    # No query parameters and no response body to parse.
    query = {}
    headers = {}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)
    if raw:
        return ClientRawResponse(None, http_response)
enable_methods.metadata = {'url': '/module/{connectionId}/enableMethods'}
def enable_input_messages(
        self, connection_id, custom_headers=None, raw=False, **operation_config):
    """Enable input messages.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    request_url = self._client.format_url(self.enable_input_messages.metadata['url'], **path_args)

    # No query parameters and no response body to parse.
    query = {}
    headers = {}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)
    if raw:
        return ClientRawResponse(None, http_response)
enable_input_messages.metadata = {'url': '/module/{connectionId}/enableInputMessages'}
def get_twin(
        self, connection_id, custom_headers=None, raw=False, **operation_config):
    """Get the device twin.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper instead of
     the deserialized body
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: Twin or ClientRawResponse if raw=true
    :rtype: ~e2erestapi.models.Twin or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    request_url = self._client.format_url(self.get_twin.metadata['url'], **path_args)

    # No query parameters; JSON response expected.
    query = {}
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET and check for success.
    http_request = self._client.get(request_url, query, headers)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)

    result = self._deserialize('Twin', http_response)
    if raw:
        return ClientRawResponse(result, http_response)
    return result
get_twin.metadata = {'url': '/module/{connectionId}/twin'}
def patch_twin(
        self, connection_id, twin, custom_headers=None, raw=False, **operation_config):
    """Updates the device twin.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param twin: twin patch to apply
    :type twin: ~e2erestapi.models.Twin
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    request_url = self._client.format_url(self.patch_twin.metadata['url'], **path_args)

    # JSON body; no response body to parse.
    query = {}
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)
    body = self._serialize.body(twin, 'Twin')

    # Issue the PATCH and check for success.
    http_request = self._client.patch(request_url, query, headers, body)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)
    if raw:
        return ClientRawResponse(None, http_response)
patch_twin.metadata = {'url': '/module/{connectionId}/twin'}
def wait_for_desired_properties_patch(
        self, connection_id, custom_headers=None, raw=False, **operation_config):
    """Wait for the next desired property patch.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper instead of
     the deserialized body
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: Twin or ClientRawResponse if raw=true
    :rtype: ~e2erestapi.models.Twin or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    request_url = self._client.format_url(
        self.wait_for_desired_properties_patch.metadata['url'], **path_args)

    # No query parameters; JSON response expected.
    query = {}
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET (blocks server-side until a patch arrives) and check status.
    http_request = self._client.get(request_url, query, headers)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)

    result = self._deserialize('Twin', http_response)
    if raw:
        return ClientRawResponse(result, http_response)
    return result
wait_for_desired_properties_patch.metadata = {'url': '/module/{connectionId}/twinDesiredPropPatch'}
def send_event(
        self, connection_id, event_body, custom_headers=None, raw=False, **operation_config):
    """Send an event.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param event_body: event payload to send
    :type event_body: ~e2erestapi.models.EventBody
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path argument.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    request_url = self._client.format_url(self.send_event.metadata['url'], **path_args)

    # JSON body; no response body to parse.
    query = {}
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)
    body = self._serialize.body(event_body, 'EventBody')

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers, body)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)
    if raw:
        return ClientRawResponse(None, http_response)
send_event.metadata = {'url': '/module/{connectionId}/event'}
def send_output_event(
        self, connection_id, output_name, event_body, custom_headers=None, raw=False, **operation_config):
    """Send an event to a module output.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param output_name: name of the module output to send to
    :type output_name: str
    :param event_body: event payload to send
    :type event_body: ~e2erestapi.models.EventBody
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str'),
        'outputName': self._serialize.url("output_name", output_name, 'str'),
    }
    request_url = self._client.format_url(self.send_output_event.metadata['url'], **path_args)

    # JSON body; no response body to parse.
    query = {}
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)
    body = self._serialize.body(event_body, 'EventBody')

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers, body)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)
    if raw:
        return ClientRawResponse(None, http_response)
send_output_event.metadata = {'url': '/module/{connectionId}/outputEvent/{outputName}'}
def wait_for_input_message(
        self, connection_id, input_name, custom_headers=None, raw=False, **operation_config):
    """Wait for a message on a module input.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param input_name: name of the module input to listen on
    :type input_name: str
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper instead of
     the deserialized body
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: EventBody or ClientRawResponse if raw=true
    :rtype: ~e2erestapi.models.EventBody or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str'),
        'inputName': self._serialize.url("input_name", input_name, 'str'),
    }
    request_url = self._client.format_url(self.wait_for_input_message.metadata['url'], **path_args)

    # No query parameters; JSON response expected.
    query = {}
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET (blocks server-side until a message arrives) and check status.
    http_request = self._client.get(request_url, query, headers)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)

    result = self._deserialize('EventBody', http_response)
    if raw:
        return ClientRawResponse(result, http_response)
    return result
wait_for_input_message.metadata = {'url': '/module/{connectionId}/inputMessage/{inputName}'}
def wait_for_method_and_return_response(
        self, connection_id, method_name, request_and_response, custom_headers=None, raw=False, **operation_config):
    """Wait for a method call, verify the request, and return the response.

    This is a workaround for SDKs whose method-call handling is
    synchronous only: the service responds to the incoming method with
    the payload supplied here, then hands back the method parameters.
    Real-world implementations would never do this, but it is the only
    sane way to write the test code right now (the method handlers for
    C, Java, and probably Python return the method response directly
    instead of supporting an async method call).

    :param connection_id: Id for the connection
    :type connection_id: str
    :param method_name: name of the method to handle
    :type method_name: str
    :param request_and_response: expected request plus response to send
    :type request_and_response:
     ~e2erestapi.models.MethodRequestAndResponse
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the raw response wrapper
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str'),
        'methodName': self._serialize.url("method_name", method_name, 'str'),
    }
    request_url = self._client.format_url(
        self.wait_for_method_and_return_response.metadata['url'], **path_args)

    # JSON body; no response body to parse.
    query = {}
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)
    body = self._serialize.body(request_and_response, 'MethodRequestAndResponse')

    # Issue the PUT and check for success.
    http_request = self._client.put(request_url, query, headers, body)
    http_response = self._client.send(http_request, stream=False, **operation_config)
    if http_response.status_code != 200:
        raise HttpOperationError(self._deserialize, http_response)
    if raw:
        return ClientRawResponse(None, http_response)
wait_for_method_and_return_response.metadata = {'url': '/module/{connectionId}/waitForMethodAndReturnResponse/{methodName}'}
def invoke_module_method(
        self, connection_id, device_id, module_id, method_invoke_parameters, custom_headers=None, raw=False, **operation_config):
    """call the given method on the given module.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param device_id:
    :type device_id: str
    :param module_id:
    :type module_id: str
    :param method_invoke_parameters:
    :type method_invoke_parameters: ~e2erestapi.models.MethodInvoke
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Fill the path template stored on this function's metadata attribute.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str'),
        'deviceId': self._serialize.url("device_id", device_id, 'str'),
        'moduleId': self._serialize.url("module_id", module_id, 'str'),
    }
    url = self._client.format_url(
        self.invoke_module_method.metadata['url'], **path_args)

    # No query parameters for this operation; JSON in, JSON out.
    query = {}
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    body = self._serialize.body(method_invoke_parameters, 'MethodInvoke')

    # PUT the method-invoke payload and wait for the (synchronous) result.
    request = self._client.put(url, query, headers, body)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise HttpOperationError(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('object', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
invoke_module_method.metadata = {'url': '/module/{connectionId}/moduleMethod/{deviceId}/{moduleId}'}
def invoke_device_method(
        self, connection_id, device_id, method_invoke_parameters, custom_headers=None, raw=False, **operation_config):
    """call the given method on the given device.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param device_id:
    :type device_id: str
    :param method_invoke_parameters:
    :type method_invoke_parameters: ~e2erestapi.models.MethodInvoke
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Fill the path template stored on this function's metadata attribute.
    path_args = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str'),
        'deviceId': self._serialize.url("device_id", device_id, 'str'),
    }
    url = self._client.format_url(
        self.invoke_device_method.metadata['url'], **path_args)

    # No query parameters for this operation; JSON in, JSON out.
    query = {}
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    body = self._serialize.body(method_invoke_parameters, 'MethodInvoke')

    # PUT the method-invoke payload and wait for the (synchronous) result.
    request = self._client.put(url, query, headers, body)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise HttpOperationError(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('object', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
invoke_device_method.metadata = {'url': '/module/{connectionId}/deviceMethod/{deviceId}'}
def get_connection_status(
        self, connection_id, custom_headers=None, raw=False, **operation_config):
    """get the current connection status.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: str or ClientRawResponse if raw=true
    :rtype: str or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Construct URL (path template is stored on the function's metadata attribute)
    url = self.get_connection_status.metadata['url']
    path_format_arguments = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters (none for this operation)
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if custom_headers:
        header_parameters.update(custom_headers)

    # Construct and send request
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    # Only 200 is a documented success status for this operation
    if response.status_code not in [200]:
        raise HttpOperationError(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('str', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
get_connection_status.metadata = {'url': '/module/{connectionId}/connectionStatus'}
def wait_for_connection_status_change(
        self, connection_id, connection_status, custom_headers=None, raw=False, **operation_config):
    """wait for the current connection status to change and return the changed
    status.

    :param connection_id: Id for the connection
    :type connection_id: str
    :param connection_status: Desired connection status. Possible values
     include: 'connected', 'disconnected'
    :type connection_status: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: str or ClientRawResponse if raw=true
    :rtype: str or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Construct URL (path template is stored on the function's metadata attribute)
    url = self.wait_for_connection_status_change.metadata['url']
    path_format_arguments = {
        'connectionId': self._serialize.url("connection_id", connection_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters (the desired status goes in the query string)
    query_parameters = {}
    query_parameters['connectionStatus'] = self._serialize.query("connection_status", connection_status, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if custom_headers:
        header_parameters.update(custom_headers)

    # Construct and send request (server long-polls until the status changes)
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise HttpOperationError(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('str', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
wait_for_connection_status_change.metadata = {'url': '/module/{connectionId}/connectionStatusChange'}
| 41.292169
| 136
| 0.665019
| 5,660
| 54,836
| 6.240283
| 0.045053
| 0.036806
| 0.028313
| 0.031144
| 0.888392
| 0.876387
| 0.870696
| 0.863052
| 0.858409
| 0.858409
| 0
| 0.00427
| 0.248286
| 54,836
| 1,327
| 137
| 41.323286
| 0.852575
| 0.342184
| 0
| 0.7691
| 1
| 0
| 0.095865
| 0.031843
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044143
| false
| 0.005093
| 0.005093
| 0
| 0.117148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
86d6b0f632c57c531653f2fc0777c7d4ab0b0fd7
| 14,034
|
py
|
Python
|
tests/test_oauth2_resource_owner_password.py
|
bottoy/requests_auth
|
f95ecd833d52341ebe0e2c974d133577ae124dd9
|
[
"MIT"
] | null | null | null |
tests/test_oauth2_resource_owner_password.py
|
bottoy/requests_auth
|
f95ecd833d52341ebe0e2c974d133577ae124dd9
|
[
"MIT"
] | null | null | null |
tests/test_oauth2_resource_owner_password.py
|
bottoy/requests_auth
|
f95ecd833d52341ebe0e2c974d133577ae124dd9
|
[
"MIT"
] | null | null | null |
from responses import RequestsMock
import pytest
import requests
import requests_auth
from tests.oauth2_helper import token_cache
from tests.auth_helper import get_header, get_request
def test_oauth2_password_credentials_flow_token_is_sent_in_authorization_header_by_default(
    token_cache, responses: RequestsMock
):
    """Token from the password grant is sent as a Bearer Authorization header."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    # Mock the token endpoint with the RFC 6749 sample success response.
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "access_token": "2YotnFZFEjr1zCsicMWpAA",
            "token_type": "example",
            "expires_in": 3600,
            "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter": "example_value",
        },
    )
    assert (
        get_header(responses, auth).get("Authorization")
        == "Bearer 2YotnFZFEjr1zCsicMWpAA"
    )
    # The token request body must carry the resource-owner credentials.
    assert (
        get_request(responses, "http://provide_access_token/").body
        == "grant_type=password&username=test_user&password=test_pwd"
    )
def test_scope_is_sent_as_is_when_provided_as_str(token_cache, responses: RequestsMock):
    """A string scope is passed through verbatim (then URL-encoded on the wire)."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token",
        username="test_user",
        password="test_pwd",
        scope="my_scope+my_other_scope",
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "access_token": "2YotnFZFEjr1zCsicMWpAA",
            "token_type": "example",
            "expires_in": 3600,
            "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter": "example_value",
        },
    )
    assert (
        get_header(responses, auth).get("Authorization")
        == "Bearer 2YotnFZFEjr1zCsicMWpAA"
    )
    # '+' in the literal scope string gets percent-encoded as %2B.
    assert (
        get_request(responses, "http://provide_access_token/").body
        == "grant_type=password&username=test_user&password=test_pwd&scope=my_scope%2Bmy_other_scope"
    )
def test_scope_is_sent_as_str_when_provided_as_list(
    token_cache, responses: RequestsMock
):
    """A list scope is joined into a single space-separated ('+' encoded) string."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token",
        username="test_user",
        password="test_pwd",
        scope=["my_scope", "my_other_scope"],
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "access_token": "2YotnFZFEjr1zCsicMWpAA",
            "token_type": "example",
            "expires_in": 3600,
            "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter": "example_value",
        },
    )
    assert (
        get_header(responses, auth).get("Authorization")
        == "Bearer 2YotnFZFEjr1zCsicMWpAA"
    )
    # Unlike the str case, the list is joined with a space → '+' on the wire.
    assert (
        get_request(responses, "http://provide_access_token/").body
        == "grant_type=password&username=test_user&password=test_pwd&scope=my_scope+my_other_scope"
    )
def test_with_invalid_grant_request_no_json(token_cache, responses: RequestsMock):
    """A non-JSON 400 body is surfaced verbatim as the exception message."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    # Token endpoint fails with a plain-text body instead of a JSON error.
    responses.add(
        responses.POST, "http://provide_access_token", body="failure", status=400
    )
    with pytest.raises(requests_auth.InvalidGrantRequest) as exc:
        requests.get("http://authorized_only", auth=auth)
    assert str(exc.value) == "failure"
def test_with_invalid_grant_request_invalid_request_error(
    token_cache, responses: RequestsMock
):
    """An 'invalid_request' error expands to the RFC 6749 description text."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={"error": "invalid_request"},
        status=400,
    )
    with pytest.raises(requests_auth.InvalidGrantRequest) as exception_info:
        requests.get("http://authorized_only", auth=auth)
    assert (
        str(exception_info.value)
        == "invalid_request: The request is missing a required parameter, includes an "
        "unsupported parameter value (other than grant type), repeats a parameter, "
        "includes multiple credentials, utilizes more than one mechanism for "
        "authenticating the client, or is otherwise malformed."
    )
def test_with_invalid_grant_request_invalid_request_error_and_error_description(
    token_cache, responses: RequestsMock
):
    """A provided error_description replaces the canned RFC description."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={"error": "invalid_request", "error_description": "desc of the error"},
        status=400,
    )
    with pytest.raises(requests_auth.InvalidGrantRequest) as exception_info:
        requests.get("http://authorized_only", auth=auth)
    assert str(exception_info.value) == "invalid_request: desc of the error"
def test_with_invalid_grant_request_invalid_request_error_and_error_description_and_uri(
    token_cache, responses: RequestsMock
):
    """error_description and error_uri are both folded into the message."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "error": "invalid_request",
            "error_description": "desc of the error",
            "error_uri": "http://test_url",
        },
        status=400,
    )
    with pytest.raises(requests_auth.InvalidGrantRequest) as exception_info:
        requests.get("http://authorized_only", auth=auth)
    assert (
        str(exception_info.value)
        # Plain literal: the previous f-string had no placeholders (ruff F541).
        == "invalid_request: desc of the error\nMore information can be found on http://test_url"
    )
def test_with_invalid_grant_request_invalid_request_error_and_error_description_and_uri_and_other_fields(
    token_cache, responses: RequestsMock
):
    """Unrecognized JSON error fields are appended as 'Additional information'."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "error": "invalid_request",
            "error_description": "desc of the error",
            "error_uri": "http://test_url",
            "other": "other info",
        },
        status=400,
    )
    with pytest.raises(requests_auth.InvalidGrantRequest) as exception_info:
        requests.get("http://authorized_only", auth=auth)
    assert (
        str(exception_info.value)
        # Plain literal (same text): the previous f-string had no placeholders and
        # needed {{ }} escaping only because of the unnecessary f prefix (ruff F541).
        == "invalid_request: desc of the error\nMore information can be found on http://test_url\nAdditional information: {'other': 'other info'}"
    )
def test_with_invalid_grant_request_without_error(token_cache, responses: RequestsMock):
    """Without an 'error' field the whole JSON body becomes the message."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={"other": "other info"},
        status=400,
    )
    with pytest.raises(requests_auth.InvalidGrantRequest) as exception_info:
        requests.get("http://authorized_only", auth=auth)
    assert str(exception_info.value) == "{'other': 'other info'}"
def test_with_invalid_grant_request_invalid_client_error(
    token_cache, responses: RequestsMock
):
    """An 'invalid_client' error expands to the RFC 6749 description text."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={"error": "invalid_client"},
        status=400,
    )
    with pytest.raises(requests_auth.InvalidGrantRequest) as exception_info:
        requests.get("http://authorized_only", auth=auth)
    assert (
        str(exception_info.value)
        == "invalid_client: Client authentication failed (e.g., unknown client, no "
        "client authentication included, or unsupported authentication method). The "
        "authorization server MAY return an HTTP 401 (Unauthorized) status code to "
        "indicate which HTTP authentication schemes are supported. If the client "
        'attempted to authenticate via the "Authorization" request header field, the '
        "authorization server MUST respond with an HTTP 401 (Unauthorized) status "
        'code and include the "WWW-Authenticate" response header field matching the '
        "authentication scheme used by the client."
    )
def test_with_invalid_grant_request_invalid_grant_error(
    token_cache, responses: RequestsMock
):
    """An 'invalid_grant' error expands to the RFC 6749 description text."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={"error": "invalid_grant"},
        status=400,
    )
    with pytest.raises(requests_auth.InvalidGrantRequest) as exception_info:
        requests.get("http://authorized_only", auth=auth)
    assert (
        str(exception_info.value)
        == "invalid_grant: The provided authorization grant (e.g., authorization code, "
        "resource owner credentials) or refresh token is invalid, expired, revoked, "
        "does not match the redirection URI used in the authorization request, or was "
        "issued to another client."
    )
def test_with_invalid_grant_request_unauthorized_client_error(
    token_cache, responses: RequestsMock
):
    """An 'unauthorized_client' error expands to the RFC 6749 description text."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={"error": "unauthorized_client"},
        status=400,
    )
    with pytest.raises(requests_auth.InvalidGrantRequest) as exception_info:
        requests.get("http://authorized_only", auth=auth)
    assert (
        str(exception_info.value)
        == "unauthorized_client: The authenticated client is not authorized to use this "
        "authorization grant type."
    )
def test_with_invalid_grant_request_unsupported_grant_type_error(
    token_cache, responses: RequestsMock
):
    """An 'unsupported_grant_type' error expands to the RFC 6749 description text."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={"error": "unsupported_grant_type"},
        status=400,
    )
    with pytest.raises(requests_auth.InvalidGrantRequest) as exception_info:
        requests.get("http://authorized_only", auth=auth)
    assert (
        str(exception_info.value)
        == "unsupported_grant_type: The authorization grant type is not supported by the "
        "authorization server."
    )
def test_with_invalid_grant_request_invalid_scope_error(
    token_cache, responses: RequestsMock
):
    """An 'invalid_scope' error expands to the RFC 6749 description text."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={"error": "invalid_scope"},
        status=400,
    )
    with pytest.raises(requests_auth.InvalidGrantRequest) as exception_info:
        requests.get("http://authorized_only", auth=auth)
    assert (
        str(exception_info.value)
        == "invalid_scope: The requested scope is invalid, unknown, malformed, or "
        "exceeds the scope granted by the resource owner."
    )
def test_without_expected_token(token_cache, responses: RequestsMock):
    """A missing token field raises GrantNotProvided, echoing the full response."""
    auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token",
        username="test_user",
        password="test_pwd",
        # Configured field name does not exist in the token response below.
        token_field_name="not_provided",
    )
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "access_token": "2YotnFZFEjr1zCsicMWpAA",
            "token_type": "example",
            "expires_in": 3600,
            "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter": "example_value",
        },
    )
    with pytest.raises(requests_auth.GrantNotProvided) as exception_info:
        requests.get("http://authorized_only", auth=auth)
    assert (
        str(exception_info.value)
        == "not_provided not provided within {'access_token': '2YotnFZFEjr1zCsicMWpAA', 'token_type': 'example', 'expires_in': 3600, 'refresh_token': 'tGzv3JOkF0XG5Qx2TlKWIA', 'example_parameter': 'example_value'}."
    )
def test_token_url_is_mandatory():
    """Constructing the auth with an empty token URL must fail."""
    with pytest.raises(Exception) as exc:
        requests_auth.OAuth2ResourceOwnerPasswordCredentials("", "test_user", "test_pwd")
    assert str(exc.value) == "Token URL is mandatory."
def test_user_name_is_mandatory():
    """Constructing the auth with an empty user name must fail."""
    with pytest.raises(Exception) as exc:
        requests_auth.OAuth2ResourceOwnerPasswordCredentials("http://test_url", "", "test_pwd")
    assert str(exc.value) == "User name is mandatory."
def test_password_is_mandatory():
    """Constructing the auth with an empty password must fail."""
    with pytest.raises(Exception) as exc:
        requests_auth.OAuth2ResourceOwnerPasswordCredentials("http://test_url", "test_user", "")
    assert str(exc.value) == "Password is mandatory."
def test_header_value_must_contains_token():
    """A custom header_value without the {token} placeholder must be rejected."""
    with pytest.raises(Exception) as exc:
        requests_auth.OAuth2ResourceOwnerPasswordCredentials(
            "http://test_url", "test_user", "test_pwd", header_value="Bearer token"
        )
    assert str(exc.value) == "header_value parameter must contains {token}."
| 36.931579
| 215
| 0.684053
| 1,488
| 14,034
| 6.159946
| 0.108871
| 0.045603
| 0.061204
| 0.079206
| 0.804822
| 0.79544
| 0.776238
| 0.755837
| 0.755837
| 0.741436
| 0
| 0.010579
| 0.211914
| 14,034
| 379
| 216
| 37.029024
| 0.818174
| 0
| 0
| 0.608824
| 0
| 0.005882
| 0.342454
| 0.042041
| 0
| 0
| 0
| 0
| 0.064706
| 1
| 0.055882
| false
| 0.117647
| 0.017647
| 0
| 0.073529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
86e9edf3aae31c827a04d678ebd3f96beea33c5b
| 147,957
|
py
|
Python
|
utils/common_sql.py
|
gtouchgogo/qtalk_search
|
9cc34572c187033fb860c1b1988ca3d24c95738b
|
[
"MIT"
] | 1
|
2021-01-08T07:47:39.000Z
|
2021-01-08T07:47:39.000Z
|
utils/common_sql.py
|
startalkIM/search
|
7737b4e83cd02e63a78e2e93d2b960f70d33539f
|
[
"MIT"
] | 2
|
2019-12-04T07:59:36.000Z
|
2021-09-15T09:19:19.000Z
|
utils/common_sql.py
|
startalkIM/search
|
7737b4e83cd02e63a78e2e93d2b960f70d33539f
|
[
"MIT"
] | 6
|
2019-11-21T14:10:58.000Z
|
2021-02-15T17:30:04.000Z
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'jingyu.he'
import psycopg2
from psycopg2 import sql
import re
import sys
import time
import json
import asyncpg
from collections import defaultdict
from functools import reduce
from utils.get_conf import get_config_file, get_logger_file
from utils.logger_conf import configure_logger
# import conf.constants
# from conf import constants
from conf.cache_params_define import SINGLE_KEY, MUC_KEY, SINGLE_TRACE_KEY, MUC_TRACE_KEY, SINGLE_CACHE, MUC_CACHE
from conf.search_params_define import REGEX_TAG
from utils.redis_utils import redis_cli
from utils.pinyin_util import PinyinUtil
from utils.common_utils import TextHandler
from utils.regex_utils import chinese_pattern
from utils.redis_utils import RedisUtil
from utils.time_utils import TimeUtils
from utils.similar_util import get_similar_bool
# --- module-level configuration, logging and shared helper objects ---
time_utils = TimeUtils()
config = get_config_file()
pgconfig = config['postgresql']
host = pgconfig['host']
port = pgconfig['port']
user = pgconfig['user']
database = pgconfig['database']
password = pgconfig['password']
if_cached = config['cache'].getboolean('if_cache')
log_path = get_logger_file('sql.log')
sql_logger = configure_logger('sql', log_path)
if_async = None
user_data = []
# Raw string: '\d' / '\.' in a normal literal are invalid escape sequences
# (SyntaxWarning on modern CPython). The pattern itself is unchanged: it
# extracts the leading version number (e.g. '3.8.10') from sys.version.
PY_VERSION = re.findall(r'^([\d\.].*?)\s', sys.version)[0]
DB_VERSION = None
pinyin = PinyinUtil()
text_handler = TextHandler()
# Frequently used TextHandler helpers, re-exported as module-level names.
merge_list_of_dict = text_handler.merge_list_of_dict
formulate_text = text_handler.formulate_text
formulate_text_to_uid = text_handler.formulate_text_to_uid
symbol_to_english = text_handler.symbol_to_english
class UserLib:
def __init__(self, user_id=None):
    """Open a postgres connection and warm the per-domain user-data cache.

    :param user_id: full JID ('name@domain'); the domain part selects which
        users to cache. When absent, falls back to the module-level ``domain``.
    :raises ConnectionError: when no user data can be loaded at all.
    """
    # NOTE(review): `domain` is declared global but never assigned here, and no
    # module-level definition is visible in this file section — presumably it
    # is set elsewhere in the module; verify, otherwise the else-branch below
    # raises NameError. TODO confirm.
    global domain
    self.conn = psycopg2.connect(host=host, database=database, user=user, password=password, port=port)
    self.conn.autocommit = True
    __domain = None
    self.user_data = {}
    if user_id and '@' in user_id:
        __domain = user_id.split('@')[1]
        # Build the redis cache of all users for this domain on first use.
        if not self.user_data and self.user_data is not None:
            cache_redis_cli = RedisUtil()
            self.user_data = cache_redis_cli.get_all_user_data(domain=__domain)
            if not self.user_data:
                sql_logger.info("no user data in redis, making one into it..")
                # Cache miss: load from postgres, then push into redis.
                self.user_data = self.get_user_data(domain=__domain)
                if self.user_data:
                    cache_redis_cli.set_all_user_data(data=self.user_data, domain=__domain)
                    sql_logger.info("redis user data set..")
                else:
                    sql_logger.error("NO USER FOUND IN POSTGRESQL!!")
                    self.user_data = None
            if self.user_data is None:
                sql_logger.error("POSTGRESQL STILL NOT SET, IF SET, PLEASE RESTART SERVICE")
                raise ConnectionError("POSTGRESQL IS NOT CONNECTED BECAUSE NO USER FOUND")
    else:
        # No usable user_id: derive the domain from the module-level value.
        if isinstance(domain, str):
            __domain = domain
        elif isinstance(domain, list):
            __domain = None
        else:
            raise ValueError("GET FIND DOMAIN FOR USER {}".format(user_id))
    self.domain = __domain
def get_msg_id(self, msgid, msgtype):
    """Resolve a message id to its numeric row id, briefly polling for it.

    Retries up to 4 times (0.5s apart) when the row has not landed yet,
    e.g. because message persistence is asynchronous. Returns None when
    the row never appears.
    """
    table = 'msg_history' if msgtype == 'message' else 'muc_room_history'
    query = """SELECT id FROM {} where msg_id = %(msgid)s limit 1;""".format(table)
    cur = self.conn.cursor()
    cur.execute(query, {'msgid': msgid})
    rows = cur.fetchall()
    if rows:
        found = rows[0][0]
    else:
        found = None
        for attempt in range(4):
            time.sleep(0.5)
            sql_logger.info('waiting .. {}'.format(attempt))
            cur.execute(query, {'msgid': msgid})
            rows = cur.fetchall()
            if rows:
                found = rows[0][0]
                if found:
                    break
            elif attempt == 3:
                found = None
    cur.close()
    return found
def get_msg_by_msg_ids(self, msgids, msgtype):
    """Fetch message bodies for the given msg_ids, ordered by row id.

    Returns a list of dicts with 'body', 'id' and 'msg_id' keys; NULL
    columns are normalized to empty strings.
    """
    if msgtype in ['chat', 'consult']:
        query = """select m_body, id, msg_id from msg_history where msg_id = ANY(%(msgids)s) order by id asc"""
    else:
        query = """select packet, id, msg_id from muc_room_history where msg_id = ANY(%(msgids)s) order by id asc"""
    cur = self.conn.cursor()
    cur.execute(query, {'msgids': msgids})
    messages = []
    for record in cur.fetchall():
        body, row_id, message_id = ('' if col is None else col for col in record)
        messages.append({'body': body, 'id': row_id, 'msg_id': message_id})
    cur.close()
    return messages
def get_domain(self):
    """Return every host name listed in the host_info table."""
    cur = self.conn.cursor()
    cur.execute("""select host from host_info""")
    # Keep only well-formed single-column rows, unwrapping the value.
    hosts = [record[0] for record in cur.fetchall() if len(record) == 1]
    cur.close()
    return hosts
def get_user_data(self, domain=''):
    """Load all active users of a domain, keyed by full user id.

    Each value is a compact dict: i=user id, n=name, p=pinyin, u=avatar
    url, d=department, m=mood. NULL columns become empty strings.
    """
    users = defaultdict(dict)
    cur = self.conn.cursor()
    query = """select b.username || '@' || b.host as user_id, user_name, pinyin, b.url,a.department,b.mood from host_users a left join vcard_version b on a.user_id = b.username where a.hire_flag = 1 and a.host_id = ANY(select id from host_info where host = %(domain)s )"""
    cur.execute(query, {'domain': domain})
    for record in cur.fetchall():
        clean = ['' if col is None else col for col in record]
        users[clean[0]] = {
            'i': clean[0],
            'n': clean[1],
            'p': clean[2],
            'u': clean[3],
            'd': clean[4],
            'm': clean[5],
        }
    cur.close()
    return users
def get_user_mucs(self, user_id, user_domain=''):
    """Return the muc (group) JIDs the given user is registered in.

    :param user_id: full JID 'name@domain'; the domain part is mandatory.
    :param user_domain: overridden by the domain extracted from user_id.
    :return: list of 'muc_name@domain' strings; empty list on bad input.
    """
    if '@' in user_id:
        user_s_name = user_id.split('@')[0]
        user_domain = user_id.split('@')[1]
    else:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    s_result = []
    conn = self.conn
    sql = "SELECT muc_name||'@'||domain from user_register_mucs where username = %(user_s_name)s and registed_flag = 1 and host = %(user_domain)s"
    cursor = conn.cursor()
    cursor.execute(sql, {'user_s_name': user_s_name, 'user_domain': user_domain})
    rs = cursor.fetchall()
    for row in rs:
        row = ['' if x is None else x for x in row]
        s_result.append(row[0])
    cursor.close()
    # BUG FIX: previously returned the module-level `user_data` (always []),
    # silently discarding every row collected above.
    return s_result
def close(self):
    """Close the underlying postgres connection when one is open."""
    connection = self.conn
    if connection:
        connection.close()
def get_db_version(self):
    """Return the postgres server version (e.g. '9.6.2'), or False.

    Parses the banner returned by ``SELECT version();``. Returns False
    when the banner cannot be parsed or no row is returned.
    """
    _version = False
    cursor = self.conn.cursor()
    cursor.execute("SELECT version();")
    for row in cursor.fetchall():
        row = ['' if x is None else x for x in row]
        banner = row[0]
        # Raw string: '\s' / '\d' in a normal literal are invalid escape
        # sequences (SyntaxWarning on modern CPython). The banner looks like
        # 'PostgreSQL 9.6.2 on x86_64-...' once lower-cased.
        _result = re.findall(r'postgresql\s(\d.*?)\son', banner.lower())
        _version = _result[0] if _result else False
    cursor.close()
    return _version
def get_habit(self, key, habit, form, user, origin=False, common=False):
    """Search a user's habitual contacts ('single') or groups ('muc') by key.

    The key is matched against names, user ids and pinyin transliterations,
    with candidate data cached in redis for 60 seconds per user.

    :param key: raw search keyword (Chinese, Latin, or mixed).
    :param habit: dict of recently-used id lists keyed by the *_KEY /
        *_TRACE_KEY constants. Assumed shape — TODO confirm against caller.
    :param form: 'single' for people, 'muc' for group chats.
    :param user: full JID 'name@domain' of the searching user.
    :param origin, common: forwarded to self.search_group (defined elsewhere
        in this class; semantics not visible here).
    :return: list of matching result dicts (possibly empty).
    """
    if '@' in user:
        user_s_name = user.split('@')[0]
        user_domain = user.split('@')[1]
    else:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    # Lazily (re)build the per-domain user cache if it is empty.
    if not self.user_data:
        self.user_data = self.get_user_data(user_domain)
        if self.user_data:
            cache_redis_cli = RedisUtil()
            cache_redis_cli.set_all_user_data(data=self.user_data, domain=user_domain)
            sql_logger.info("redis user data set..")
    raw_key = key
    key = symbol_to_english(key)
    result = []
    # Search people by pinyin and user id.
    if form == 'single':
        _k = SINGLE_CACHE + '_' + user
        __user_data = redis_cli.get(_k)
        if __user_data:
            __user_data = json.loads(__user_data)
        elif habit[SINGLE_TRACE_KEY] or habit[SINGLE_KEY]:
            # user_list = set(habit[SINGLE_TRACE_KEY] + habit[SINGLE_KEY])  # only userid needed here, no domain
            # Ordered de-duplicated union: habit ids first, then traces.
            user_list = habit[SINGLE_KEY] + list(
                filter(lambda x: x not in habit[SINGLE_KEY], habit[SINGLE_TRACE_KEY]))
            __user_data = self.single_habit_data(data=user_list, user_domain=user_domain)
            try:
                # Preserve the habit ordering in the fetched rows.
                __user_data = sorted(__user_data, key=lambda x: user_list.index(x.get('uri', '')))
                # __user_data = sorted(__user_data, key=lambda x: user_list.index(x.get('qtalkname', '')))
            except ValueError:
                sql_logger.exception("ORDER PROBLEM : NOT IN LIST")
            # Cache the candidate list for 60s to avoid re-querying postgres.
            redis_cli.set(name=_k, value=json.dumps(__user_data, ensure_ascii=False), ex=60)
        if __user_data:
            sql_logger.debug('user data {}'.format(__user_data))
            # Pure Chinese keyword.
            if not chinese_pattern.sub('', key):
                sql_logger.debug('修正前 {}'.format(key))
                key = formulate_text(key)  # keep only the Chinese characters
                sql_logger.debug('修正为标点 {}'.format(key))
                _r1 = list((filter(lambda x: key in x['name'], __user_data)))
                # x['name'] is assumed to be a string here — TODO confirm.
                _r2 = list(filter(lambda x: get_similar_bool(key, x['name']), __user_data))
                result = merge_list_of_dict(_r1, _r2)
            # Keyword looks like a user id: exact substring match only, no similarity.
            elif ('.' in key) or ('_' in key) or ('-' in key):
                sql_logger.debug('修正前 {}'.format(key))
                # key = formulate_text_to_uid(key)
                sql_logger.debug('修正为标点 {}'.format(key))
                sql_logger.debug('user data {}'.format(__user_data))
                # result = set(filter(lambda x: key in x['qtalkname'], __user_data))
                result = merge_list_of_dict(list((filter(lambda x: key in x['qtalkname'], __user_data))))
            elif chinese_pattern.findall(formulate_text(key)) and chinese_pattern.sub('', formulate_text(key)):  # mixed Chinese and Latin/symbols
                key = formulate_text(key)
                _r1 = list(filter(lambda x: key in formulate_text(x['name']), __user_data))
                _r2 = list(filter(lambda x: get_similar_bool(a=key, b=x['name']), __user_data))
                chinese_words = chinese_pattern.findall(key)
                sql_logger.debug('中文结果 {}'.format(chinese_words))
                # Replace each Chinese fragment with its pinyin and rematch.
                __k = list(map(lambda x: pinyin.get_pinyin(x), chinese_words))
                test = {f: t for f, t in zip(chinese_words, __k)}.items()
                for i in test:
                    key = key.replace(i[0], i[1])
                sql_logger.debug('转换后 {}'.format(key))
                _r3 = list(filter(lambda x: key in formulate_text(x['pinyin']), __user_data))
                result = merge_list_of_dict(_r1, _r2, _r3)
            else:  # pure Latin keyword
                sql_logger.debug('修正前 {}'.format(key))
                key = formulate_text(key)
                sql_logger.debug('修正为标点 {}'.format(key))
                sql_logger.debug('JU RAN YOU user data {}'.format(__user_data))
                _r1 = list(filter(lambda x: key in x['qtalkname'], __user_data))  # e.g. jingyu.he
                _r2 = list(filter(lambda x: key in formulate_text(x['pinyin']), __user_data))
                _r3 = list(filter(lambda x: get_similar_bool(a=key, b=x['qtalkname']), __user_data))
                _r4 = list(filter(lambda x: get_similar_bool(a=key, b=x['pinyin']), __user_data))
                result = merge_list_of_dict(_r1, _r2, _r3, _r4)
            sql_logger.debug('user data for result {}'.format(result))
            sql_logger.debug('PUTTING INTO REDIS {}'.format(__user_data))
    # Search groups by id, pinyin and title.
    elif form == 'muc':
        # key = formulate_text_to_uid(key)
        _k = MUC_CACHE + '_' + user
        __muc_data = redis_cli.get(_k)
        if __muc_data:
            __muc_data = json.loads(__muc_data)
        elif habit[MUC_TRACE_KEY] or habit[MUC_KEY]:
            # muc_list = set(habit[MUC_TRACE_KEY] + habit[MUC_KEY])  # only userid needed here, no domain
            muc_list = habit[MUC_KEY] + list(filter(lambda x: x not in habit[MUC_KEY], habit[MUC_TRACE_KEY]))
            __muc_data = self.muc_habit_data(muc_list, user=user)
            try:
                __muc_data = sorted(__muc_data, key=lambda x: muc_list.index(x.get('uri')))
            except ValueError as e:
                sql_logger.exception("ORDER PROBLEM : NOT IN LIST")
            redis_cli.set(name=_k, value=json.dumps(__muc_data, ensure_ascii=False), ex=60)
        if __muc_data:
            sql_logger.debug('muc data {}'.format(__muc_data))
            # Pure Chinese keyword.
            __muc_list = [x.get('uri') for x in __muc_data]
            if not chinese_pattern.sub('', key):
                # result = set(filter(lambda x: key in x['label'], __muc_data))
                _r1 = list(filter(lambda x: get_similar_bool(a=key, b=x['label']), __muc_data))
                if common:
                    _r2 = list((filter(lambda x: key in x['label'], __muc_data)))
                    _r3 = self.search_group(user_id=user, username=raw_key, limit=len(__muc_list), offset=0,
                                            habit='', exclude=__muc_list, origin=origin, common=common,
                                            from_habit=True)
                    # NOTE(review): this immediately overwrites the search_group
                    # result above with a re-sort of _r2 — looks like it was
                    # meant to sort _r3; confirm before changing.
                    _r3 = sorted(_r2, key=lambda x: __muc_list.index(x.get('uri')))
                    result = merge_list_of_dict(_r1, _r2, _r3)
                else:
                    _r2 = list((filter(lambda x: key in x['label'], __muc_data)))
                    result = merge_list_of_dict(_r1, _r2)
            # Keyword is a user id / group id (no Chinese after normalization).
            elif chinese_pattern.sub('', formulate_text(key)):
                key = formulate_text(key)
                for __d in __muc_data:
                    __d['label'] = formulate_text(__d['label'])
                _r1 = list(filter(lambda x: key in x['label'], __muc_data))
                sql_logger.debug('R1 {}'.format(_r1))
                # Pinyin of group names (slated for removal later):
                # take each label's [pinyin, initials], map key-membership over
                # them, then reduce with OR to decide inclusion.
                _r2 = list(filter(lambda x: reduce(lambda a, b: a + b, list(
                    map(lambda x: True if key in x else False, pinyin.get_all(x['label'])))),
                                  __muc_data))
                chinese_words = chinese_pattern.findall(key)
                __k = list(map(lambda x: pinyin.get_pinyin(x), chinese_words))
                test = {f: t for f, t in zip(chinese_words, __k)}.items()
                for i in test:
                    key = key.replace(i[0], i[1])
                _r3 = list(filter(lambda x: reduce(lambda a, b: a + b, list(
                    map(lambda x: True if key in x else False, pinyin.get_all(x['label'])))),
                                  __muc_data))
                if common:
                    _r4 = self.search_group(user_id=user, username=raw_key, limit=len(__muc_list), offset=0,
                                            habit='', exclude=__muc_list, origin=origin, common=common,
                                            from_habit=True)
                    result = merge_list_of_dict(_r1, _r2, _r3, _r4)
                else:
                    result = merge_list_of_dict(_r1, _r2, _r3)
            else:
                for __d in __muc_data:
                    __d['label'] = formulate_text(__d['label'])
                _r1 = list(filter(lambda x: key in x['label'], __muc_data))
                sql_logger.debug('R1 {}'.format(_r1))
                _r2 = list(filter(lambda x: key in x['uri'], __muc_data))
                sql_logger.debug('R2 {}'.format(_r2))
                # Pinyin of group names (slated for removal later) — same
                # map-then-reduce OR trick as above.
                _r3 = list(filter(lambda x: reduce(lambda a, b: a + b, list(
                    map(lambda x: True if key in x else False, pinyin.get_all(x['label'])))),
                                  __muc_data))
                if common:
                    _r4 = self.search_group(user_id=user, username=raw_key, limit=len(__muc_list), offset=0,
                                            habit='', exclude=__muc_list,
                                            from_habit=True)
                    result = merge_list_of_dict(_r1, _r2, _r3, _r4)
                else:
                    result = merge_list_of_dict(_r1, _r2, _r3)
                sql_logger.debug(
                    'PINYIN {}'.format([pinyin.get_all(x['label']) for x in __muc_data]))
                sql_logger.debug('R3 {}'.format(_r3))
    # self.close()
    sql_logger.debug('returning result {}'.format(list(result)))
    return list(result)
def single_habit_data(self, data, user_domain=''):
    """Look up profile rows for a batch of user ids and build habit entries.

    Any ``user@host`` input is reduced to its bare user id before querying.
    Returns a list of dicts with qtalkname/uri/content/icon/name/label/pinyin;
    the mood, when present, is appended to the label.
    """
    conn = self.conn
    bare_ids = [item.split('@')[0] if '@' in item else item for item in data]
    sql = """SELECT aa.user_id, aa.department, aa.icon, aa.user_name, aa.mood,aa.pinyin FROM ( SELECT a.user_id, a.department, b.url AS icon, a.user_name, b.mood,a.pinyin FROM host_users a LEFT JOIN vcard_version b ON a.user_id = b.username WHERE a.hire_flag = 1 AND LOWER(a.user_type) != 's' AND a.user_id = ANY(%(user_lists)s) and a.host_id = ANY(select id from host_info where host = %(domain)s) ) aa """
    cursor = conn.cursor()
    cursor.execute(sql, {'user_lists': bare_ids, 'domain': user_domain})
    collected = []
    for record in cursor.fetchall():
        record = ['' if field is None else field for field in record]
        entry = dict()
        entry['qtalkname'] = record[0]
        entry['uri'] = record[0] + '@' + domain
        entry['content'] = record[1]
        entry['icon'] = record[2]
        entry['name'] = record[3]
        entry['label'] = record[3] + '(' + record[0] + ')'
        if record[4]:
            # non-empty mood is shown after the display label
            entry['label'] = entry['label'] + ' - ' + record[4]
        entry['pinyin'] = record[5]
        collected.append(entry)
    cursor.close()
    sql_logger.debug('SINGLE HABIT RESULT {}'.format(collected))
    return collected
def muc_habit_data(self, data, user, user_domain=''):
    """Fetch display info for the rooms in *data* that *user* is registered in.

    *user* must be a full JID (``name@host``); its host part overrides
    *user_domain*. Returns a list of dicts with uri/label/content/icon,
    or [] when the user carries no domain.
    """
    conn = self.conn
    if '@' not in user:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    user_s_name = user.split('@')[0]
    user_domain = user.split('@')[1]
    # room names are matched as stored; no conference-domain qualification here
    sql = """select a.muc_name, a.domain, b.show_name, b.muc_title, b.muc_pic from user_register_mucs as a left join muc_vcard_info as b on concat(a.muc_name, '@', a.domain) = b.muc_name where a.registed_flag != 0 and a.username = %(user_id)s and (b.muc_name = ANY(%(muc_list)s)) and a.host = %(domain)s"""
    cursor = conn.cursor()
    cursor.execute(sql, {'muc_list': data, 'user_id': user_s_name, 'domain': user_domain})
    found = []
    for record in cursor.fetchall():
        record = ['' if field is None else field for field in record]
        found.append({
            'uri': record[0] + '@' + record[1],
            'label': record[2],
            'content': record[3],
            'icon': record[4],
        })
    cursor.close()
    sql_logger.debug('MUC HABIT {}'.format(found))
    return found
def search_user(self, username, user_id, limit=5, offset=0, habit='', exclude=None):
    """Search users by id / display name / pinyin (and client markup names).

    :param username: search term; a leading REGEX_TAG switches to regex matching
    :param user_id: full JID of the searcher (``user@domain``)
    :param limit: max rows returned
    :param offset: paging offset (reduced by the number of excluded entries)
    :param habit: per-user habit data used to re-rank results when cached
    :param exclude: previously returned entries (dicts with ``qtalkname``) to skip
    :return: list of result dicts; None when user_id carries no domain
    """
    s_result = list()
    conn = self.conn
    exclude_list = []
    if '@' in user_id:
        user_s_name = user_id.split('@')[0]
        user_domain = user_id.split('@')[1]
    else:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return
    regex_tag = username.startswith(REGEX_TAG)
    if regex_tag:
        search_model = '~'
        username = username[1:]
    else:
        search_model = 'ilike'
        username = '%{}%'.format(username)
    if exclude:
        # psycopg2 cannot adapt a Python set to a SQL array, so collect
        # the excluded ids into a list (was a set comprehension)
        exclude_list = ['{}'.format(x.get('qtalkname')) for x in exclude]
        offset = offset - len(exclude)
        if offset < 0:
            offset = 0
    if if_cached:
        sql = """SELECT aa.user_id, aa.department, bb.url as icon, CASE WHEN aa.nick != '' THEN aa.nick ELSE aa.user_name END, bb.mood , aa.pinyin
FROM (
SELECT a.user_id, b.department, b.user_name, b.pinyin, a.nick
FROM (
SELECT uu.user_id || '@' || hh.host as user_id,'' as nick, uu.host_id as hostid
FROM host_users uu
LEFT JOIN host_info hh
ON uu.host_id = hh.id
WHERE uu.hire_flag = 1 AND LOWER(uu.user_type) != 's' AND uu.user_id <> ALL(%(exclude_list)s) AND (uu.user_id ILIKE %(username)s OR uu.user_name {search_model} %(username)s OR uu.pinyin ILIKE %(username)s) AND uu.host_id = ANY(select id from host_info where host = %(domain)s)
UNION
SELECT cc.subkey AS user_id, cc.configinfo as nick, hh.id as hostid
FROM client_config_sync cc
LEFT JOIN host_info hh
ON cc.host = hh.host
WHERE split_part(cc.subkey,'@',1) <> ALL(%(exclude_list)s) AND cc.username = %(user_s_name)s AND cc.configkey = 'kMarkupNames' AND cc.configinfo {search_model} %(username)s AND cc.host = %(domain)s
) a
LEFT JOIN host_users b
ON split_part(a.user_id,'@',1) = b.user_id AND a.hostid = b.host_id
) aa
LEFT JOIN vcard_version bb
ON aa.user_id = bb.username || '@' || bb.host
ORDER BY aa.user_id ASC LIMIT %(limit)s OFFSET %(offset)s"""
        sql = sql.format(search_model=search_model)
        injection = {'username': username, 'user_id': user_id, 'limit': limit, 'offset': offset,
                     'exclude_list': exclude_list, 'domain': user_domain, 'user_s_name': user_s_name}
    else:
        # uncached path additionally ranks by most-recent conversation time
        sql = """SELECT aa.user_id, aa.department, bb.url as icon, CASE WHEN aa.nick != '' THEN aa.nick ELSE aa.user_name END, bb.mood , aa.pinyin
FROM
(
SELECT a.user_id, b.department, b.user_name, b.pinyin, a.nick
FROM (
SELECT uu.user_id || '@' || hh.host as user_id,'' as nick, uu.host_id as hostid
FROM host_users uu
LEFT JOIN host_info hh
ON uu.host_id = hh.id
WHERE uu.hire_flag = 1 AND LOWER(uu.user_type) != 's' AND
( uu.user_id ILIKE %(username)s OR uu.user_name {search_model} %(username)s OR uu.pinyin ILIKE %(username)s ) AND uu.host_id = ANY(select id from host_info where host = %(domain)s )
UNION
SELECT cc.subkey AS user_id, cc.configinfo as nick, hh.id as hostid
FROM client_config_sync cc
LEFT JOIN host_info hh
ON cc.host = hh.host
WHERE cc.username = %(user_s_name)s AND cc.configkey = 'kMarkupNames' AND cc.configinfo {search_model} %(username)s AND cc.host = %(domain)s
) a
LEFT JOIN host_users b
ON split_part(a.user_id, '@', 1) = b.user_id AND a.hostid = b.host_id
) aa
LEFT JOIN vcard_version bb
ON aa.user_id = bb.username || '@' || bb.host
LEFT JOIN
(
SELECT CASE WHEN m_from || '@' || from_host = %(user_id)s THEN m_to || '@' || to_host ELSE m_from || '@' || from_host END AS contact, max(create_time) mx
FROM msg_history
WHERE (m_from = %(user_s_name)s and from_host = %(domain)s ) or (m_to = %(user_s_name)s and to_host = %(domain)s )
GROUP BY contact
) cc
ON aa.user_id = cc.contact
ORDER BY cc.mx DESC nulls last
LIMIT %(limit)s
OFFSET %(offset)s"""
        sql = sql.format(search_model=search_model)
        injection = {'username': username, 'user_id': user_id, 'limit': limit, 'offset': offset,
                     'domain': user_domain, 'user_s_name': user_s_name}
    cursor = conn.cursor()
    cursor.execute(sql, injection)
    rs = cursor.fetchall()
    for row in rs:
        res = dict()
        row = ['' if x is None else x for x in row]
        res['qtalkname'] = row[0].split('@')[0]
        res['uri'] = row[0]
        res['content'] = row[1]
        res['icon'] = row[2]
        res['name'] = row[3]
        res['label'] = row[3] + '(' + row[0] + ')'
        if row[4]:
            res['label'] = res['label'] + ' - ' + row[4]
        res['pinyin'] = row[5]
        s_result.append(res)
    cursor.close()
    if if_cached and habit:
        sql_logger.debug('BEFORE HABIT REARRANGE {}\n HABIT {}'.format(s_result, habit))
        s_result = self.sort_by_habit(data=s_result, habit=habit[SINGLE_KEY], name_key='qtalkname',
                                      search_key=username)
        sql_logger.debug('AFTER HABIT REARRANGE {}'.format(s_result))
    elif if_cached and not habit:
        # fixed trailing '}}' typo in the format string
        sql_logger.error("CACHED BUT NO HABIT, userid : {user_id}, username : {username}".format(user_id=user_id,
                                                                                                 username=username))
    if '.' in username and s_result:
        # dotted terms look like full user ids: promote an exact-uri match
        tag = False
        username = username + '@' + user_domain
        for idx, item in enumerate(s_result):
            if username == item.get('uri'):
                # list.pop() takes an index, not an element, and the
                # original mutated the list while iterating; move the
                # first exact match to the front and stop
                s_result.insert(0, s_result.pop(idx))
                tag = True
                break
        if not tag and self.user_data:
            # no exact match in the page: pull it from the cached user map
            __complete_match = self.user_data.get(username)
            if __complete_match:
                res = dict()
                res['qtalkname'] = __complete_match['i'].split('@')[0]
                res['uri'] = __complete_match['i']
                res['content'] = __complete_match['d']
                res['icon'] = __complete_match['u']
                res['name'] = __complete_match['n']
                res['label'] = __complete_match['n'] + '(' + __complete_match['i'] + ')'
                if __complete_match['m']:
                    res['label'] = res['label'] + ' - ' + __complete_match['m']
                res['pinyin'] = __complete_match['p']
                s_result = [res] + s_result
    sql_logger.debug('SINGLE HABIT {}'.format(s_result))
    return s_result
def search_group(self, user_id, username, limit=5, offset=0, habit='', exclude=None, origin=True,
                 common=True, from_habit=False):
    """Search group chats by room name and (optionally) by member names.

    :param user_id: full JID of the searcher
    :param username: raw search text; a REGEX_TAG prefix enables regex mode
    :param limit: max rows
    :param offset: paging offset (reduced by len(exclude))
    :param habit: habit data for re-ranking cached results
    :param exclude: entries already shown (dicts carrying 'uri')
    :param origin: include room-name matching in the generated SQL
    :param common: include member-based matching in the generated SQL
    :param from_habit: True when invoked from the habit-search path
    :return: list of result dicts; [] on malformed user_id; None when the
        member-search keys are empty or only contain the searcher himself
    """
    # TODO: this is ugly -- refactor when time permits
    if '@' in user_id:
        user = user_id
        user_s_name = user_id.split('@')[0]
        user_domain = user_id.split('@')[1]
    else:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    raw_key = username.strip()
    regex_tag = username.startswith(REGEX_TAG)
    if regex_tag:
        search_model = '~'
        username = username[1:]
    else:
        search_model = 'ilike'
        username = '%{}%'.format(username)
        raw_key = '%{}%'.format(raw_key)
    __start_time = time.time()
    s_result = list()
    key = None
    ret_user_name = False
    if not exclude:
        exclude = []
    if common:
        key = username.split()
        _key_list = []
        for _k in key:
            # pure-Chinese tokens are kept from 2 chars, other tokens from 4
            if not chinese_pattern.sub('', _k):
                if len(_k) >= 2:
                    _key_list.append(_k)
            else:
                if len(_k) > 3:
                    _key_list.append(_k)
        key = _key_list
        if key:
            if user_s_name in key:
                # searching for oneself yields nothing; the original called
                # key.remove(user_id) with the full JID (normally absent
                # from the bare-id keys), which raised ValueError
                return None
        else:
            return None
        # return display names for the highlighted hits only when the keys
        # contain Chinese characters; ids are highlighted otherwise
        # TODO: should eventually be decided directly in the SQL
        if chinese_pattern.findall(''.join(key)):
            ret_user_name = True
    offset = offset - len(exclude)
    if offset < 0:
        offset = 0
    if from_habit:
        sql = self.make_common_sql(keys=key, origin=False, common=True, habit_tag=True)
        exclude_list = list(map(lambda x: x.split('@')[0], exclude))
    else:
        sql = self.make_common_sql(keys=key, origin=origin, common=common)
        exclude_list = {'{}'.format(x.get('uri', '')) for x in exclude}
        exclude_list = list(map(lambda x: x.split('@')[0], exclude_list))
    sql = sql.format(search_model=search_model, searcher_domain_index=user_domain)
    key_injection = {
        'user_id': user_id,
        'offset': offset,
        'limit': limit,
        'exclude_list': exclude_list,
        'user_domain': user_domain,
        'raw_key': raw_key,
        'user_s_name': user_s_name
    }
    # member keys are numbered key_1 .. key_n to match make_common_sql;
    # key may be None when common is False, hence the `or []`
    for _i, _k in enumerate(key or []):
        key_injection['key_{}'.format(_i + 1)] = _k
    # (a positional `injection` list was assembled here originally but was
    # never used -- execute() takes key_injection)
    conn = self.conn
    cursor = conn.cursor()
    cursor.execute(sql, key_injection)
    rs = cursor.fetchall()
    for row in rs:
        row = ['' if x is None else x for x in row]
        res = dict()
        res['uri'] = row[0]
        res['label'] = row[2]
        res['content'] = row[3]
        res['icon'] = row[4]
        # row[5] carries the member-hit aggregation: nested lists of
        # '|'-joined member ids; [''] marks a pure room-name hit
        __hits = []
        from_common = False
        from_name = False
        if row[5]:
            if isinstance(row[5], list):
                for i in row[5]:
                    if i == ['']:
                        from_name = True
                        continue
                    if not i:
                        continue
                    if isinstance(i, str):
                        if '|' in i:
                            __hits.extend(i.split('|'))
                        else:
                            __hits.append(i)
                    elif isinstance(i, list):
                        for u in i:
                            if isinstance(u, str):
                                if '|' in u:
                                    __hits.extend(u.split('|'))
                                else:
                                    __hits.append(u)
                            elif isinstance(u, list):
                                for k in u:
                                    if '|' in k:
                                        __hits.extend(k.split('|'))
                                    else:
                                        __hits.append(k)
                    else:
                        raise TypeError("WRONG COMMON MEMBER HITS {}".format(row[5]))
            # a plain str should not occur; even if it does it will
            # usually fail the length check below
            elif isinstance(row[5], str):
                if '|' in row[5]:
                    __hits.extend(row[5].split('|'))
                else:
                    __hits.append(row[5])
        else:
            from_name = True
        # a room counts as a member hit only when every key matched someone
        if __hits and len(__hits) >= len(key):
            from_common = True
            res['hit'] = __hits
        elif __hits and len(__hits) < len(key):
            from_common = False
            if not from_name:
                continue
        if from_common and from_name:
            res['todoType'] = 6
        elif from_common and not from_name:
            res['todoType'] = 4
        elif not from_common and from_name:
            res['todoType'] = 2
        s_result.append(res)
    cursor.close()
    if not from_habit:
        if if_cached and habit:
            _habit = list(map(lambda x: x + '@conference.' + domain, habit[MUC_KEY]))
            sql_logger.debug('BEFORE HABIT REARRANGE {}\n HABIT {}'.format(s_result, habit))
            s_result = self.sort_by_habit(data=s_result, habit=_habit, name_key='uri')
            sql_logger.debug('AFTER HABIT REARRANGE {}'.format(s_result))
        elif if_cached and not habit:
            # fixed trailing '}}' typo in the format string
            sql_logger.error(
                "CACHED BUT NO HABIT, userid : {user_id}, username : {username}".format(user_id=user_id,
                                                                                        username=username))
    else:
        _habit = list(map(lambda x: x + '@conference.' + domain, exclude_list))
        sql_logger.debug('BEFORE HABIT REARRANGE {}\n HABIT {}'.format(s_result, habit))
        s_result = self.sort_by_habit(data=s_result, habit=_habit, name_key='uri')
        sql_logger.debug('AFTER HABIT REARRANGE {}'.format(s_result))
    if common and ret_user_name and self.user_data:
        # map member-id hits back to display names for highlighting
        # (when hits is a str, iterating it yields characters -- preserved)
        for _r in s_result:
            hits = _r.get('hit', '')
            if not hits:
                break
            _r['hit'] = [self.user_data.get(x, {}).get('n', '') for x in hits]
    sql_logger.debug('GROUP RESULT {}'.format(s_result))
    __end_time = time.time()
    sql_logger.info("SEARCH GROUP USED {}".format(__end_time - __start_time))
    return s_result
def search_group_by_single(self, user_id, key, limit=5, offset=0, habit='', exclude=None):
    """Find rooms whose member set matches every token in *key*.

    Each whitespace-separated token longer than 2 chars must match at least
    one member (by id, name or pinyin); only rooms with matches for all
    tokens are returned.
    """
    # Read the module-level conference_str into a local: assigning to the
    # global name inside the function made it local and raised
    # UnboundLocalError on the very first isinstance() check.
    conf_str = conference_str
    if not isinstance(conf_str, str):
        if '@' in user_id:
            conf_str = 'conference.' + user_id.split('@')[1]
        else:
            raise TypeError("CANT DETERMINE DOMAIN FOR SEARCH conference_str {}".format(conf_str))
    key = key.split()
    key = list(filter(lambda x: len(x) > 2, key))
    if key:
        if user_id in key:
            # searching for oneself: drop out (list.remove returns None)
            if not key.remove(user_id):
                return None
    else:
        return None
    key_count = len(key)
    s_result = list()
    conn = self.conn
    if if_cached:
        # NOTE: the original also selected A.domain here, shifting every
        # result column one position; the select list now matches the
        # row-unpacking below (and the uncached branch).
        sql = """SELECT A.muc_name, B.show_name, B.muc_title, B.muc_pic FROM ( SELECT muc_name, domain FROM user_register_mucs WHERE username = %(user_id)s AND registed_flag != 0 AND muc_name IN ( SELECT muc_name FROM user_register_mucs WHERE username IN ( SELECT user_id FROM host_users WHERE hire_flag = 1 AND (user_id ~ ANY(array[%(key_str)s]) OR user_name ~ ANY(array[%(key_str)s]) OR pinyin ~ ANY(array[%(key_str)s]))) GROUP BY muc_name HAVING COUNT(*) = %(key_count)s )) A JOIN muc_vcard_info B ON (A.muc_name || %(conference_str)s) = b.muc_name LIMIT %(limit)s OFFSET %(offset)s"""
    else:
        sql = """SELECT A.muc_room_name, B.show_name, B.muc_title, B.muc_pic FROM (SELECT muc_room_name, MAX(create_time) as max FROM muc_room_history aa RIGHT JOIN (SELECT muc_name FROM user_register_mucs WHERE username = %(user_id)s AND registed_flag != 0 AND muc_name in (SELECT muc_name FROM user_register_mucs WHERE username IN (SELECT user_id FROM host_users WHERE hire_flag = 1 AND (user_id ~ any(array[%(key_str)s]) OR user_name ~ any(array[%(key_str)s]) OR pinyin ~ any(array[%(key_str)s]))) GROUP BY muc_name HAVING COUNT(*) = %(key_count)s)) bb ON aa.muc_room_name = bb.muc_name GROUP BY muc_room_name ORDER BY max DESC nulls last LIMIT %(limit)s OFFSET %(offset)s) A JOIN muc_vcard_info B ON (a.muc_room_name || %(conference_str)s) = b.muc_name"""
    cursor = conn.cursor()
    cursor.execute(sql,
                   {'user_id': user_id, 'limit': limit, 'offset': offset, 'conference_str': '@' + conf_str,
                    'key_str': key, 'key_count': key_count})
    rs = cursor.fetchall()
    for row in rs:
        row = ['' if x is None else x for x in row]
        res = dict()
        res['uri'] = row[0] + '@' + conf_str
        res['label'] = row[1]
        res['content'] = row[2]
        res['icon'] = row[3]
        s_result.append(res)
    cursor.close()
    if if_cached and habit:
        _habit = list(map(lambda x: x + '@conference.' + domain, habit[MUC_KEY]))
        sql_logger.debug('BEFORE HABIT REARRANGE {}\n HABIT {}'.format(s_result, habit))
        s_result = self.sort_by_habit(data=s_result, habit=_habit, name_key='uri')
        sql_logger.debug('AFTER HABIT REARRANGE {}'.format(s_result))
    elif if_cached and not habit:
        # fixed trailing '}}' typo in the format string
        sql_logger.error("CACHED BUT NO HABIT, userid : {user_id}, username : {username}".format(user_id=user_id,
                                                                                                 username=key))
    sql_logger.debug('COMMON RESULT {}'.format(s_result))
    return s_result
def history_user(self, user_id, term, offset, limit, to_user=None, time_range=None, agg_tag=False):
    """Search one-to-one message history for *term*.

    :param user_id: full JID of the searcher
    :param term: search text; a REGEX_TAG prefix switches to regex matching
    :param offset: paging offset
    :param limit: max rows
    :param to_user: restrict to one peer (str JID) or several (list of JIDs)
    :param time_range: optional [start, end] bounds on create_time
    :param agg_tag: when True, aggregate hit counts per conversation
    :return: list of message dicts ([] when user_id has no domain)
    """
    s_result = list()
    conn = self.conn
    if '@' in user_id:
        user_s_name = user_id.split('@')[0]
        user_domain = user_id.split('@')[1]
    else:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    regex_tag = term.startswith(REGEX_TAG)
    if regex_tag:
        search_model = '~'
        term = term[1:]
    else:
        search_model = 'ilike'
        term = '%{}%'.format(term)
    if not agg_tag and to_user:
        sql = """SELECT create_time as date, m_from, from_host as fromhost, realfrom, m_to , to_host as tohost, realto as realto, m_body as msg, msg_id
FROM msg_history
WHERE xpath('/message/body/text()',m_body::xml)::text {search_model} %(term)s {user_limit} {time_limit_start} {time_limit_end}
ORDER BY create_time DESC
OFFSET %(offset)s
LIMIT %(limit)s"""
        sub_injection = {}
        if to_user:
            if isinstance(to_user, list):
                # NOTE: the original fragment left both inner parentheses
                # unbalanced, producing invalid SQL; closed here.
                user_limit = """AND (
(m_from = %(user_s_name)s and from_host = %(user_domain)s and m_to || '@' || to_host = ANY(%(to_user)s))
OR
(m_to = %(user_s_name)s and to_host = %(user_domain)s and m_from || '@' || from_host = ANY(%(to_user)s))
)"""
                sub_injection['to_user'] = to_user
                sub_injection['user_s_name'] = user_s_name
                sub_injection['user_domain'] = user_domain
            elif isinstance(to_user, str):
                to_user_s_name = to_user.split('@')[0]
                to_user_domain = to_user.split('@')[1]
                user_limit = """AND (
(m_from = %(user_s_name)s and from_host = %(user_domain)s and m_to = %(to_user_s_name)s and to_host = %(to_user_domain)s )
OR
(m_to = %(user_s_name)s and to_host = %(user_domain)s and m_from = %(to_user_s_name)s and from_host = %(to_user_domain)s )
)"""
                sub_injection['user_s_name'] = user_s_name
                sub_injection['user_domain'] = user_domain
                sub_injection['to_user_s_name'] = to_user_s_name
                sub_injection['to_user_domain'] = to_user_domain
            else:
                user_limit = ''
        else:
            user_limit = "AND ((m_from = %(user_s_name)s and from_host = %(user_domain)s) or(m_to = %(user_s_name)s and to_host = %(user_domain)s))"
            sub_injection['user_s_name'] = user_s_name
            sub_injection['user_domain'] = user_domain
        time_limit_start = ''
        time_limit_end = ''
        if time_range and isinstance(time_range, list):
            # placeholder names now match the injection keys (the original
            # used %(time_limit_starts)s for both bounds -> KeyError)
            if time_range[0]:
                time_limit_start = "AND create_time > %(time_limit_start)s"
                sub_injection['time_limit_start'] = time_range[0]
            if time_range[1]:
                time_limit_end = "AND create_time < %(time_limit_end)s"
                sub_injection['time_limit_end'] = time_range[1]
        sql = sql.format(user_limit=user_limit, time_limit_start=time_limit_start, time_limit_end=time_limit_end,
                         search_model=search_model)
        cursor = conn.cursor()
        cursor.execute(sql,
                       {**{'term': term, 'limit': limit, 'offset': offset, 'user_id': user_id},
                        **sub_injection})
        rs = cursor.fetchall()
        for row in rs:
            row = ['' if x is None else x for x in row]
            res = dict()
            if row[0]:
                res['date'] = row[0].strftime('%Y-%m-%d %H:%M:%S')
            else:
                res['date'] = ''
            res['from'] = row[1] + '@' + row[2]
            res['realfrom'] = row[3] if row[3] else res['from']
            res['to'] = row[4] + '@' + row[5]
            res['realto'] = row[6] if row[6] else res['to']
            res['msg'] = row[7]
            res['msgid'] = row[8]
            s_result.append(res)
        cursor.close()
    else:
        # aggregate: one row per conversation with its hit count, carrying
        # the latest matching message
        sql = """SELECT a.count, b.create_time as date, b.m_from, b.from_host as fromhost, b.realfrom, b.m_to, b.to_host as tohost, b.realto, b.m_body as msg, a.conversation, b.msg_id, a.id FROM
(
SELECT count(1) as count, MAX(id) as id, m_from||'@'||from_host || '_' || m_to||'@'||to_host as conversation
FROM msg_history
WHERE xpath('/message/body/text()',m_body::xml)::text {search_model} %(term)s AND ( (m_from = %(user_s_name)s and from_host = %(user_domain)s) or (m_to = %(user_s_name)s and to_host = %(user_domain)s) ) {time_limit_start} {time_limit_end}
GROUP BY m_from||'@'||from_host || '_' || m_to||'@'||to_host
ORDER BY id desc
OFFSET %(offset)s
LIMIT %(limit)s
) a
LEFT JOIN msg_history b
ON a.id = b.id"""
        sub_injection = {'user_s_name': user_s_name, 'user_domain': user_domain}
        time_limit_start = ''
        time_limit_end = ''
        if time_range and isinstance(time_range, list):
            # the fragments apply inside the subquery over msg_history, so
            # they reference create_time directly (the original used the
            # non-existent alias b. and mismatched placeholder names); the
            # time filter is now outside the from/to OR-group so it bounds
            # both directions
            if time_range[0]:
                time_limit_start = "AND create_time > %(time_limit_start)s"
                sub_injection['time_limit_start'] = time_range[0]
            if time_range[1]:
                time_limit_end = "AND create_time < %(time_limit_end)s"
                sub_injection['time_limit_end'] = time_range[1]
        sql = sql.format(time_limit_start=time_limit_start, time_limit_end=time_limit_end,
                         search_model=search_model)
        cursor = conn.cursor()
        cursor.execute(sql,
                       {**{'term': term, 'limit': limit, 'offset': offset, 'user_id': user_id},
                        **sub_injection})
        rs = cursor.fetchall()
        for row in rs:
            row = ['' if x is None else x for x in row]
            res = dict()
            res['count'] = row[0]
            if row[1]:
                res['date'] = row[1].strftime('%Y-%m-%d %H:%M:%S')
            else:
                res['date'] = ''
            res['from'] = row[2] + '@' + row[3]
            res['realfrom'] = row[4] if row[4] else res['from']
            res['to'] = row[5] + '@' + row[6]
            res['realto'] = row[7] if row[7] else res['to']
            res['msg'] = row[8]
            res['conversation'] = row[9]
            res['msgid'] = row[10]
            res['id'] = row[11]
            s_result.append(res)
        cursor.close()
        # merge A->B / B->A rows; only aggregated rows carry the
        # 'conversation' key handle_sql_result requires
        s_result = self.handle_sql_result(data=s_result)
    return s_result
def history_muc(self, user_id, term, offset, limit, to_muc=None, time_range=None, agg_tag=False):
    """Search group-chat message history for *term*.

    :param user_id: full JID of the searcher
    :param term: search text; a REGEX_TAG prefix switches to regex matching
    :param offset: paging offset
    :param limit: max rows
    :param to_muc: restrict to one room (str) or several (list); full room
        JIDs are reduced to their bare names
    :param time_range: optional [start, end] bounds on create_time
    :param agg_tag: when True, aggregate hit counts per room
    :return: list of message dicts ([] when user_id has no domain)
    """
    s_result = list()
    conn = self.conn
    if '@' in user_id:
        user_s_name = user_id.split('@')[0]
        user_domain = user_id.split('@')[1]
    else:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    regex_tag = term.startswith(REGEX_TAG)
    if regex_tag:
        search_model = '~'
        term = term[1:]
    else:
        search_model = 'ilike'
        term = '%{}%'.format(term)
    if not agg_tag and to_muc:
        sql = """SELECT a.create_time as date, b.muc_name as _to, a.packet as msg, a.msg_id as msgid, b.show_name as label, b.muc_pic as icon
FROM muc_room_history a left join muc_vcard_info b
on a.muc_room_name = split_part(b.muc_name,'@',1)
WHERE xpath('/message/body/text()',packet::xml)::text {search_model} %(term)s {muc_limit} {time_limit_start} {time_limit_end}
ORDER BY create_time
OFFSET %(offset)s
LIMIT %(limit)s"""
        sub_injection = {}
        if to_muc:
            if isinstance(to_muc, list):
                muc_s_name = list(map(lambda x: x.split('@')[0], to_muc))
                muc_limit = "AND muc_room_name = ANY(%(muc_s_name)s)"
                sub_injection['muc_s_name'] = muc_s_name
            elif isinstance(to_muc, str):
                muc_s_name = to_muc.split('@')[0]
                muc_limit = "AND muc_room_name = %(muc_s_name)s"
                sub_injection['muc_s_name'] = muc_s_name
            else:
                muc_limit = ''
        else:
            muc_limit = "AND muc_room_name in (SELECT muc_name FROM user_register_mucs where username = %(user_s_name)s and registed_flag = 1 AND domain = 'conference.'||%(user_domain)s )"
            sub_injection['user_s_name'] = user_s_name
            sub_injection['user_domain'] = user_domain
        time_limit_start = ''
        time_limit_end = ''
        if time_range and isinstance(time_range, list):
            # placeholder names now match the injection keys (the original
            # used %(time_limit_starts)s for both bounds -> KeyError)
            if time_range[0]:
                time_limit_start = "AND create_time > %(time_limit_start)s"
                sub_injection['time_limit_start'] = time_range[0]
            if time_range[1]:
                time_limit_end = "AND create_time < %(time_limit_end)s"
                sub_injection['time_limit_end'] = time_range[1]
        sql = sql.format(muc_limit=muc_limit, time_limit_start=time_limit_start, time_limit_end=time_limit_end,
                         search_model=search_model)
        cursor = conn.cursor()
        cursor.execute(sql,
                       {**{'term': term, 'limit': limit, 'offset': offset, 'user_id': user_id},
                        **sub_injection})
        rs = cursor.fetchall()
        for row in rs:
            row = ['' if x is None else x for x in row]
            res = dict()
            if row[0]:
                res['date'] = row[0].strftime('%Y-%m-%d %H:%M:%S')
            else:
                res['date'] = ''
            res['to'] = row[1]
            res['msg'] = row[2]
            res['msgid'] = row[3]
            res['from'] = ''
            res['label'] = row[4]
            res['icon'] = row[5]
            s_result.append(res)
        cursor.close()
    else:
        # aggregate: one row per room with its hit count, carrying the
        # latest matching message
        sql = """SELECT count, c.muc_name, b.msg_id, b.create_time as date, b.packet, c.show_name as label, c.muc_pic as icon , a.id FROM
(
SELECT count(1) as count, MAX(id) as id, muc_room_name
FROM muc_room_history
WHERE xpath('/message/body/text()',packet::xml)::text {search_model} %(term)s AND muc_room_name = ANY(SELECT muc_name FROM user_register_mucs where username = %(user_s_name)s and registed_flag = 1 AND host = %(user_domain)s and domain = 'conference.' || %(user_domain)s ) {time_limit_start} {time_limit_end}
GROUP BY muc_room_name
ORDER BY id desc
OFFSET %(offset)s
LIMIT %(limit)s
)a
LEFT JOIN muc_room_history b
ON a.id = b.id
LEFT JOIN muc_vcard_info c
on a.muc_room_name = split_part(c.muc_name,'@',1)"""
        sub_injection = {'user_s_name': user_s_name, 'user_domain': user_domain}
        time_limit_start = ''
        time_limit_end = ''
        if time_range and isinstance(time_range, list):
            # placeholder names now match the injection keys
            if time_range[0]:
                time_limit_start = "AND create_time > %(time_limit_start)s"
                sub_injection['time_limit_start'] = time_range[0]
            if time_range[1]:
                time_limit_end = "AND create_time < %(time_limit_end)s"
                sub_injection['time_limit_end'] = time_range[1]
        sql = sql.format(time_limit_start=time_limit_start, time_limit_end=time_limit_end,
                         search_model=search_model)
        cursor = conn.cursor()
        cursor.execute(sql,
                       {**{'term': term, 'limit': limit, 'offset': offset, 'user_id': user_id},
                        **sub_injection})
        rs = cursor.fetchall()
        for row in rs:
            row = ['' if x is None else x for x in row]
            res = dict()
            res['count'] = row[0]
            res['to'] = row[1]
            res['msgid'] = row[2]
            if row[3]:
                res['date'] = row[3].strftime('%Y-%m-%d %H:%M:%S')
            else:
                res['date'] = ''
            res['msg'] = row[4]
            res['from'] = ''
            res['label'] = row[5]
            res['icon'] = row[6]
            s_result.append(res)
        cursor.close()
    return s_result
def history_file(self, user_id, term, offset=0, limit=5, time_range=None):
    """Search shared files (msgType 5) by file name across 1-1 and room chats.

    :param user_id: full JID of the searcher
    :param term: file-name search text; REGEX_TAG prefix enables regex mode
    :param offset: paging offset
    :param limit: max rows
    :param time_range: optional [start, end] bounds on the message time
    :return: list of file-hit dicts ([] when user_id has no domain)
    """
    s_result = list()
    conn = self.conn
    if '@' in user_id:
        user_s_name = user_id.split('@')[0]
        user_domain = user_id.split('@')[1]
    else:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    regex_tag = term.startswith(REGEX_TAG)
    if regex_tag:
        search_model = '~'
        term = term[1:]
    else:
        search_model = 'ilike'
        term = '%{}%'.format(term)
    # NOTE: the original applied the file-name/time WHERE only to the second
    # UNION arm (room files went unfiltered); the same filter is now applied
    # to both arms. The time filter references the aliased column "date"
    # (create_time is aliased away inside the subqueries).
    sql = """SELECT file, from_, pfv.muc_name as to_, date, msgid, pfv.show_name as label, pfv.muc_pic as icon, msg
FROM (
SELECT json(unnest(xpath('//body[@msgType="5"]/text()', packet::xml))::text) AS file, '' AS from_, muc_room_name AS to_, create_time AS date, msg_id AS msgid,packet as msg
FROM muc_room_history
WHERE muc_room_name IN (
SELECT muc_name
FROM user_register_mucs
WHERE username = %(user_s_name)s
AND registed_flag = 1
AND host = %(user_domain)s
)
) pfc left join muc_vcard_info pfv
on pfc.to_= split_part(pfv.muc_name,'@',1)
WHERE file ->> 'FileName' {search_model} %(term)s {time_limit_start} {time_limit_end}
UNION ALL
SELECT file, from_, to_, date, msgid, pfb.user_name as label, pfv.url as icon, msg
FROM (
SELECT json(unnest(xpath('/message/body[@msgType="5"]/text()', m_body::xml))::text) AS file, m_from || '@' || from_host as from_
, m_to || '@' || to_host as to_, create_time AS date
, msg_id AS msgid, m_body as msg
FROM msg_history
WHERE (m_from = %(user_s_name)s AND from_host = %(user_domain)s )
OR (m_to = %(user_s_name)s AND to_host = %(user_domain)s )
) pfx left join vcard_version pfv
on split_part(pfx.from_,'@',1) = pfv.username
left join host_users pfb
on pfv.username = pfb.user_id and pfv.host = ANY(SELECT host from host_info WHERE id = pfb.host_id)
WHERE file ->> 'FileName' {search_model} %(term)s {time_limit_start} {time_limit_end}
ORDER BY date desc
OFFSET %(offset)s
LIMIT %(limit)s
"""
    sub_injection = {'user_s_name': user_s_name, 'user_domain': user_domain}
    time_limit_start = ''
    time_limit_end = ''
    if time_range and isinstance(time_range, list):
        # placeholder names now match the injection keys (the original used
        # %(time_limit_starts)s for both bounds -> KeyError)
        if time_range[0]:
            time_limit_start = "AND date > %(time_limit_start)s"
            sub_injection['time_limit_start'] = time_range[0]
        if time_range[1]:
            time_limit_end = "AND date < %(time_limit_end)s"
            sub_injection['time_limit_end'] = time_range[1]
    sql = sql.format(time_limit_start=time_limit_start, time_limit_end=time_limit_end, search_model=search_model)
    cursor = conn.cursor()
    cursor.execute(sql,
                   {**{'term': term, 'limit': limit, 'offset': offset, 'user_id': user_id},
                    **sub_injection})
    rs = cursor.fetchall()
    for row in rs:
        row = ['' if x is None else x for x in row]
        res = dict()
        res['fileinfo'] = row[0]
        res['from'] = row[1]
        res['to'] = row[2]
        if row[3]:
            res['date'] = row[3].strftime('%Y-%m-%d %H:%M:%S')
        else:
            res['date'] = ''
        res['msgid'] = row[4]
        res['source'] = row[5] if row[5] else row[1]
        res['icon'] = row[6]
        res['msg'] = row[7]
        res['mtype'] = 5
        s_result.append(res)
    cursor.close()
    return s_result
def history_single_file(self, user_id, term, offset=0, limit=5):
    """Search files shared in *user_id*'s 1-1 chats by file name (regex).

    :param user_id: bare user id matched against m_from / m_to
    :param term: regex applied to the FileName field
    :return: list of dicts with file/from/to/time/msgid/domain/chattype
    """
    s_result = list()
    conn = self.conn
    # ORDER BY referenced pfx.time, but the subquery aliases create_time as
    # "epo" -- fixed to order by the column that actually exists
    sql = """SELECT * from ( SELECT unnest(xpath('//body[@msgType="5"]/text()',m_body::xml))::text::json as file, m_from, m_to,create_time as epo, msg_id as msgid from msg_history where m_from = %(user_id)s or m_to = %(user_id)s) as pfx where pfx.file->>'FileName' ~ %(term)s order by pfx.epo desc offset %(offset)s limit %(limit)s"""
    cursor = conn.cursor()
    cursor.execute(sql,
                   {'user_id': user_id, 'term': term, 'offset': offset, 'limit': limit})
    rs = cursor.fetchall()
    for row in rs:
        row = ['' if x is None else x for x in row]
        res = dict()
        res['file'] = row[0]
        res['from'] = row[1]
        res['to'] = row[2]
        res['time'] = row[3]
        res['msgid'] = row[4]
        res['domain'] = domain
        res['chattype'] = 'chat'  # chat vs groupchat
        s_result.append(res)
    # TODO: sort by time; add count after returning
    cursor.close()
    return s_result
def history_muc_file(self, user_id, term, muc_list, offset=0, limit=5):
    """Search files shared in the given rooms by file name (regex).

    :param user_id: searcher id (currently unused in the query)
    :param term: regex applied to the FileName field
    :param muc_list: bare room names to search in
    :return: list of dicts with file/from/to/time/msgid/domain/chattype
    """
    s_result = list()
    conn = self.conn
    # Fixes vs the original: msg_id is now selected (row[4] was read but
    # never produced -> IndexError), the execute() key matches the SQL
    # placeholder name ('muc_lists' vs %(muc_list)s -> KeyError), and the
    # IN-list is bound as a tuple, which psycopg2 adapts for IN.
    sql = """SELECT * from ( SELECT unnest(xpath('//body[@msgType="5"]/text()',packet::xml))::text::json as file, nick, muc_room_name, create_time as time, msg_id as msgid from muc_room_history where muc_room_name in %(muc_list)s ) as pfx where pfx.file->>'FileName' ~ %(term)s order by pfx.time desc offset %(offset)s limit %(limit)s"""
    cursor = conn.cursor()
    cursor.execute(sql,
                   {'muc_list': tuple(set(muc_list)), 'term': term, 'offset': offset, 'limit': limit})
    rs = cursor.fetchall()
    for row in rs:
        row = ['' if x is None else x for x in row]
        res = dict()
        res['file'] = row[0]
        res['from'] = row[1]
        res['to'] = row[2]
        res['time'] = row[3]
        res['msgid'] = row[4]
        res['domain'] = domain
        res['chattype'] = 'groupchat'  # chat vs groupchat
        s_result.append(res)
    # TODO: sort by time
    cursor.close()
    return s_result
def get_person_info(self, person):
    """Return ``{'show_name', 'url'}`` for *person* (bare id or full JID).

    Returns an empty dict when no row matches; with multiple rows the
    last one wins.
    """
    info = {}
    bare_id = person.split('@')[0] if '@' in person else person
    sql = """ select a.user_name,b.url from host_users a join vcard_version b on a.user_id = %(person)s and a.user_id = b.username;"""
    cursor = self.conn.cursor()
    cursor.execute(sql, {'person': bare_id})
    for record in cursor.fetchall():
        record = ['' if field is None else field for field in record]
        info['show_name'] = record[0]
        info['url'] = record[1]
    cursor.close()
    return info
def get_mucs_info(self, muc):
    """Return ``{'show_name', 'muc_pic'}`` for a room.

    Bare room names are qualified with the module-level conference_str
    when one is configured. Empty dict when nothing matches; with
    multiple rows the last one wins.
    """
    info = {}
    if '@' not in muc and isinstance(conference_str, str):
        target = muc + '@' + conference_str
    else:
        target = muc
    sql = """select show_name,muc_pic from muc_vcard_info where muc_name = %(muc)s"""
    cursor = self.conn.cursor()
    cursor.execute(sql, {'muc': target})
    for record in cursor.fetchall():
        record = ['' if field is None else field for field in record]
        info['show_name'] = record[0]
        info['muc_pic'] = record[1]
    # TODO: sort the results
    cursor.close()
    return info
@staticmethod
def handle_sql_result(data):
result = {}
for hit in data:
a = hit['conversation'].split('_')[0]
b = hit['conversation'].split('_')[1]
conv = sorted([a, b])[0] + '_' + sorted([a, b])[1]
if conv in result.keys():
_temp = result[conv]
if _temp['id'] > hit['id']:
result[conv]['count'] += hit['count']
else:
result[conv] = hit
result[conv]['count'] += _temp['count']
else:
result[conv] = hit
result = sorted(list(result.values()), key=lambda x: x.get('id'))
return result
@staticmethod
def sort_by_habit(data, habit, name_key, search_key=''):
    """Promote habitual entries to the front of *data*.

    Each element of *habit* whose value matches some item's *name_key*
    field is moved to the head of the list; because promotion happens in
    habit order, later habit entries end up first.  Inputs of the wrong
    type are logged and returned unchanged.

    :param data: list of dicts to reorder.
    :param habit: set/list of values to promote, in priority order.
    :param name_key: dict key used to match items against habit entries.
    :param search_key: optional search term, only used for diagnostics.
    :return: the reordered list (or *data* untouched on bad input).
    """
    if not name_key:
        sql_logger.warning("NO NAME_KEY FOUND :{}".format(data))
    if not isinstance(data, list):
        sql_logger.error("DATA NOT A LIST :{data}".format(data=data))
        return data
    if not isinstance(habit, (set, list)):
        sql_logger.error("HABIT NOT A LIST :{habit}".format(habit=habit))
        return data
    for wanted in habit:
        names = [entry[name_key] for entry in data]
        if search_key and search_key in wanted and wanted not in names:
            sql_logger.warning("SHOULD ADD {} TO KEY {} RESULT {}".format(wanted, search_key, data))
        if wanted in names:
            promoted = data.pop(names.index(wanted))
            data = [promoted] + data
    return data
@staticmethod
def make_common_sql(keys, origin=True, common=True, habit_tag=False):
    """Build the SQL used by the group searches.

    Four template variants are produced depending on the flags:
    - ``common and habit_tag``: member search restricted to habit groups.
    - ``common and origin``: member search merged with a name search.
    - ``common and not origin``: member search only.
    - ``not common and origin``: name search only (no member tokens).

    Each key in *keys* contributes one branch to the member-matching
    sub-query; a group only matches when every key hits (HAVING COUNT ==
    len(keys)).  The returned template still contains a literal
    ``{search_model}`` placeholder that the caller fills in later.

    NOTE(review): returns ``None`` (bare ``return``) when ``common`` is
    true but *keys* is empty — callers must guard before ``.format()``.
    NOTE(review): the templates use pyformat ``%(name)s`` placeholders
    while search_group executes them through asyncpg (``$n`` positional) —
    verify that a conversion layer exists between the two.

    :param keys: list of search tokens (already wildcard-wrapped).
    :param origin: include the group-name search branch.
    :param common: include the member (user) search branches.
    :param habit_tag: restrict the member search to habit-cached groups.
    :return: SQL template string, or None (see note above).
    """
    sql = ""
    if common:
        if not keys:
            return
        case_pattern = []
        union_pattern = []
        key_len = len(keys)
        for i, k in enumerate(keys):
            # One user-lookup branch per key; {search_model} is kept
            # literal by passing '{search_model}' to .format() here.
            case_pattern.append(
                """SELECT %(key_{i})s, user_id
FROM host_users
WHERE hire_flag = 1 AND user_id != %(user_s_name)s AND ( user_id ilike %(key_{i})s OR user_name {search_model} %(key_{i})s OR pinyin ilike %(key_{i})s ) AND host_id = ANY(SELECT id FROM host_info WHERE host = %(user_domain)s )""".format(
                    i=i + 1, search_model='{search_model}'))
            # One group-hit branch per key, joined against the habit groups.
            union_pattern.append("""SELECT a.muc_name|| '@' || a.domain as muc_name, string_agg(a.username||'@'||a.host, '|') as hit, max(a.created_at) as time
FROM user_register_mucs a JOIN tmp2 b ON a.muc_name = b.muc_name
WHERE username IN (select user_id from tmp where key = %(key_{i})s ) and a.registed_flag != 0 AND a.domain = 'conference.' || %(user_domain)s
group by a.muc_name || '@' || a.domain""".format(i=i + 1))
        # keys map to $1 ... ${len(keys)}
        # then the searcher index is at len + 1
        # then the group (conference) string at len + 2
        # then offset at len + 3
        # limit at len + 4
        # exclusions at len + 5
        # the name-search key at len + 6
        # domain at len + 7
        if habit_tag:
            sql = """
WITH tmp (key, user_id) AS (
{keys_pattern}
),
tmp2 (muc_name, domain, created_at) AS (
SELECT split_part(muc_name || '@' || domain,'@',1) ,split_part(muc_name || '@' || domain,'@',2), max(created_at) as created_at
FROM user_register_mucs
WHERE username = %(user_s_name)s AND host = %(user_domain)s
AND registed_flag != 0 AND muc_name = ANY(%(exclude_list)s) AND domain = 'conference.' || %(user_domain)s
GROUP BY muc_name || '@' || domain
)
SELECT
aa.mucname,
split_part(bb.muc_name, '@', 2) AS domain,
bb.show_name,
bb.muc_title,
bb.muc_pic,
aa.tag
FROM (
SELECT mucname, tag from (
SELECT muc_name AS mucname, array_agg(hit) AS tag
FROM (
{select_pattern}
) foo
GROUP BY muc_name
HAVING COUNT(muc_name) = {length}
) boo
) aa
JOIN muc_vcard_info bb
ON (aa.mucname) = bb.muc_name
offset %(offset)s limit %(limit)s""".format(
                keys_pattern=' union all '.join(case_pattern),
                select_pattern=' union all '.join(union_pattern),
                length=key_len, search_model='{search_model}')
            return sql
    if origin and common:
        # .format() fills the per-key branches in; {search_model} stays literal.
        sql = """
WITH tmp (key, user_id) AS (
{keys_pattern}
),
tmp2 (muc_name, domain, created_at) AS (
SELECT split_part(muc_name || '@' || domain,'@',1) ,split_part(muc_name || '@' || domain,'@',2), max(created_at) as created_at
FROM user_register_mucs
WHERE username = %(user_s_name)s AND host = %(user_domain)s
AND registed_flag != 0 AND muc_name <> ALL (%(exclude_list)s) AND domain = 'conference.' || %(user_domain)s
GROUP BY muc_name || '@' || domain
)
SELECT
aa.mucname,
split_part(bb.muc_name, '@', 2) AS domain,
bb.show_name,
bb.muc_title,
bb.muc_pic,
aa.tag
FROM (
SELECT mucname, array_agg(tag) AS tag, MAX(time) as time
FROM(
SELECT mucname, tag, time from (
SELECT muc_name AS mucname, array_agg(hit) AS tag, max(time) as time
FROM (
{select_pattern}
) foo
GROUP BY muc_name
HAVING COUNT(muc_name) = {length}
) boo
union all
select a.muc_name|| '@' || a.domain as muccname, array[''] as hit, a.created_at as time
from tmp2 a join muc_vcard_info b on concat(a.muc_name, '@', a.domain) = b.muc_name
where (b.show_name {search_model} %(raw_key)s or b.muc_name ilike %(raw_key)s )
) poo
GROUP BY mucname
) aa
JOIN muc_vcard_info bb
ON aa.mucname = bb.muc_name
ORDER BY time DESC
offset %(offset)s limit %(limit)s""".format(
            keys_pattern=' union all '.join(case_pattern),
            select_pattern=' union all '.join(union_pattern),
            length=key_len, search_model='{search_model}')
    elif common and not origin:
        sql = """
WITH tmp (key, user_id) AS (
{keys_pattern}
),
tmp2 (muc_name, domain, created_at) AS (
SELECT split_part(muc_name || '@' || domain,'@',1) ,split_part(muc_name || '@' || domain,'@',2), max(created_at) as created_at
FROM user_register_mucs
WHERE username = %(user_s_name)s AND host = %(user_domain)s
AND registed_flag != 0 AND muc_name <> ALL ( %(exclude_list)s ) AND domain = 'conference.' || %(user_domain)s
GROUP BY muc_name || '@' || domain
)
SELECT
aa.mucname,
split_part(bb.muc_name, '@', 2) AS domain,
bb.show_name,
bb.muc_title,
bb.muc_pic,
aa.tag
FROM (
SELECT mucname, tag, time from (
SELECT muc_name AS mucname, array_agg(hit) AS tag, max(time) as time
FROM (
{select_pattern}
) foo
GROUP BY muc_name
HAVING COUNT(muc_name) = {length}
) boo
) aa
JOIN muc_vcard_info bb
ON aa.mucname = bb.muc_name
ORDER BY time DESC
offset %(offset)s limit %(limit)s""".format(
            keys_pattern=' union all '.join(case_pattern),
            select_pattern=' union all '.join(union_pattern),
            search_model='{search_model}', length=key_len)
    elif not common and origin:
        # Name-only search; still contains a literal {searcher_domain_index}
        # placeholder — NOTE(review): confirm the caller fills it in.
        sql = """SELECT
b.muc_name as mucname, split_part(b.muc_name,'@',2) as domain, b.show_name, b.muc_title, b.muc_pic, array['']
FROM
user_register_mucs as a left join muc_vcard_info as b
ON
concat(a.muc_name, '@', a.domain) = b.muc_name
WHERE
a.registed_flag != 0 and a.username = %(user_s_name)s and a.host = %(user_domain)s and (b.show_name {search_model} %(raw_key)s or b.muc_name ~ %(raw_key)s) and b.muc_name <> ALL (%(exclude_list)s) AND domain = {searcher_domain_index}
order by b.update_time desc offset %(offset)s limit %(limit)s"""
    return sql
class AsyncLib:
def __init__(self, user_id):
    """Build the asyncpg DSN and prime the per-domain user-data cache.

    :param user_id: full jid ('user@domain'); the domain selects which
        user cache to load (redis first, then PostgreSQL via UserLib).
    :raises ValueError: when *user_id* carries no domain.
    :raises ConnectionError: when no user data could be loaded at all.
    """
    # Declared but never assigned in this constructor — presumably kept
    # for interpreter/driver feature switches elsewhere; TODO confirm.
    global PY_VERSION, DB_VERSION, if_async
    # DSN for asyncpg.connect(); host/database/user/password/port are
    # module-level configuration values.
    self.conn_str = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(host=host,
                                                                                  database=database,
                                                                                  user=user,
                                                                                  password=password,
                                                                                  port=port)
    self.user_data = {}
    if user_id and '@' in user_id:
        __domain = user_id.split('@')[1]
    else:
        raise ValueError("NO DOMAIN FOUND IN ASYNC PG CONSTRUCTOR USERID {}".format(user_id))
    # NOTE(review): self.user_data was just set to {}, so this condition is
    # always true at this point — probably meant as a lazy-load guard.
    if not self.user_data and self.user_data is not None:
        cache_redis_cli = RedisUtil()
        self.user_data = cache_redis_cli.get_all_user_data(domain=__domain)
        if not self.user_data:
            # Cache miss: fall back to PostgreSQL and backfill redis.
            __user_lib = UserLib()
            self.user_data = __user_lib.get_user_data(domain=__domain)
            __user_lib.close()
            if self.user_data:
                cache_redis_cli.set_all_user_data(data=self.user_data, domain=__domain)
                sql_logger.info("redis user data set..")
            sql_logger.info("no user data in redis, making one into it..")
        # NOTE(review): this re-writes the redis cache even when the data
        # came from redis itself (and duplicates the backfill above).
        if self.user_data:
            cache_redis_cli.set_all_user_data(data=self.user_data, domain=__domain)
            sql_logger.info("redis user data set..")
        else:
            sql_logger.error("NO USER FOUND IN POSTGRESQL!!")
            self.user_data = None
    if self.user_data is None:
        sql_logger.error("POSTGRESQL STILL NOT SET, IF SET, PLEASE RESTART SERVICE")
        raise ConnectionError("POSTGRESQL IS NOT CONNECTED BECAUSE NO USER FOUND")
def close(self):
    """No-op: each query method opens and closes its own asyncpg
    connection, so there is no persistent resource to release here."""
    return None
async def get_user_data(self, domain=''):
    """Fetch profile info for every active (hire_flag = 1) user of *domain*.

    :param domain: XMPP host to load users for.
    :return: defaultdict keyed by full user jid, each value a dict with the
        short keys i=id, n=name, p=pinyin, u=icon url, d=department, m=mood.
    """
    users = defaultdict(dict)
    sql = """select b.username || '@' || b.host as user_id, a.user_name,a.pinyin, b.url, a.department, b.mood from host_users a left join vcard_version b on a.user_id = b.username where a.hire_flag = 1 and a.host_id = ANY(select id from host_info where host = $1)"""
    conn = await asyncpg.connect(self.conn_str)
    stmt = await conn.prepare(sql)
    for record in await stmt.fetch(domain):
        # Normalise NULL columns to '' before packing the card.
        uid, name, py, icon, dept, mood = ('' if v is None else v for v in record)
        users[uid] = {'i': uid, 'n': name, 'p': py, 'u': icon, 'd': dept, 'm': mood}
    await conn.close()
    return users
async def get_user_mucs(self, user_id, user_domain=''):
    """Return the full muc jids ('room@domain') the user is registered in.

    :param user_id: full jid; must contain '@' or an empty list is returned.
    :param user_domain: ignored on input — always overwritten from *user_id*.
    """
    if '@' not in user_id:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    parts = user_id.split('@')
    bare_name, user_domain = parts[0], parts[1]
    sql = "SELECT muc_name||'@'||domain from user_register_mucs where username = $1 and registed_flag = 1 and host = $2"
    conn = await asyncpg.connect(self.conn_str)
    stmt = await conn.prepare(sql)
    rooms = [record[0] for record in await stmt.fetch(bare_name, user_domain)]
    await conn.close()
    return rooms
async def get_habit(self, key, habit, form, user, origin=False, common=False):
    """Search the user's habitual contacts/groups for *key*.

    Habit data (frequent + recently-traced entries) is loaded from a
    short-lived redis cache, or rebuilt from PostgreSQL and re-cached.
    The key is then matched by several strategies depending on its script
    (pure Chinese, id-like, mixed, pure ASCII) and the partial results are
    merged with merge_list_of_dict.

    :param key: raw search term.
    :param habit: dict with SINGLE_KEY/SINGLE_TRACE_KEY/MUC_KEY/MUC_TRACE_KEY lists.
    :param form: 'single' for people, 'muc' for group chats.
    :param user: full jid of the searcher.
    :param origin/common: forwarded to search_group for the muc branches.
    :return: list of matching habit cards (possibly empty).
    """
    if '@' in user:
        user_s_name = user.split('@')[0]
        user_domain = user.split('@')[1]
    else:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    if not self.user_data:
        self.user_data = await self.get_user_data(user_domain)
        if self.user_data:
            cache_redis_cli = RedisUtil()
            cache_redis_cli.set_all_user_data(data=self.user_data, domain=user_domain)
            sql_logger.info("redis user data set..")
    raw_key = key
    # convert full-width characters to half-width
    key = symbol_to_english(key)
    result = []
    # people search: match pinyin and userid
    if form == 'single':
        _k = SINGLE_CACHE + '_' + user
        __user_data = redis_cli.get(_k)
        if __user_data:
            __user_data = json.loads(__user_data)
        elif habit[SINGLE_TRACE_KEY] or habit[SINGLE_KEY]:
            # user_list = set(habit[SINGLE_TRACE_KEY] + habit[SINGLE_KEY])  # only userids here, no domain
            # Habit entries first, then traced ones not already present.
            user_list = habit[SINGLE_KEY] + list(
                filter(lambda x: x not in habit[SINGLE_KEY], habit[SINGLE_TRACE_KEY]))
            sql_logger.debug('WATCH ORDER {}'.format(user_list))
            __user_data = await self.single_habit_data(user_list, user_domain)
            try:
                # __user_data = sorted(__user_data, key=lambda x: user_list.index(x.get('qtalkname', '')))
                __user_data = sorted(__user_data, key=lambda x: user_list.index(x.get('uri', '')))
            except ValueError:
                sql_logger.exception("ORDER PROBLEM : NOT IN LIST")
            sql_logger.debug('WATCH ORDER {}'.format(__user_data))
            redis_cli.set(name=_k, value=json.dumps(__user_data, ensure_ascii=False), ex=60)
        if __user_data:
            sql_logger.debug('user data {}'.format(__user_data))
            # pure Chinese key
            if not chinese_pattern.sub('', key):
                sql_logger.debug('修正前 {}'.format(key))
                key = formulate_text(key)  # keep Chinese characters only
                sql_logger.debug('修正为标点 {}'.format(key))
                _r1 = list((filter(lambda x: key in x['name'], __user_data)))
                # x['name'] must be a string here
                _r2 = list(filter(lambda x: get_similar_bool(key, x['name']), __user_data))
                result = merge_list_of_dict(_r1, _r2)
            # userid-like search: exact containment only, no similarity here
            elif ('.' in key) or ('_' in key) or ('-' in key):
                sql_logger.debug('修正前 {}'.format(key))
                # key = formulate_text_to_uid(key)
                sql_logger.debug('修正为标点 {}'.format(key))
                sql_logger.debug('user data {}'.format(__user_data))
                # result = set(filter(lambda x: key in x['qtalkname'], __user_data))
                result = merge_list_of_dict(list((filter(lambda x: key in x['qtalkname'], __user_data))))
            elif chinese_pattern.findall(formulate_text(key)) and chinese_pattern.sub('', formulate_text(key)):  # mixed Chinese and ASCII
                key = formulate_text(key)
                _r1 = list(filter(lambda x: key in formulate_text(x['name']), __user_data))  # e.g. full Chinese display name
                _r2 = list(filter(lambda x: get_similar_bool(a=key, b=x['name']), __user_data))
                sql_logger.debug('r1 {}'.format(_r1))
                sql_logger.debug('r2 {}'.format(_r2))
                chinese_words = chinese_pattern.findall(key)
                sql_logger.debug('中文结果 {}'.format(chinese_words))
                # Replace each Chinese run with its pinyin, then match pinyin.
                __k = list(map(lambda x: pinyin.get_pinyin(x), chinese_words))
                test = {f: t for f, t in zip(chinese_words, __k)}.items()
                for i in test:
                    key = key.replace(i[0], i[1])
                sql_logger.debug('转换后 {}'.format(key))
                _r3 = list(filter(lambda x: key in formulate_text(x['pinyin']), __user_data))
                result = merge_list_of_dict(_r1, _r2, _r3)
            else:  # pure ASCII key
                sql_logger.debug('修正前 {}'.format(key))
                key = formulate_text(key)
                sql_logger.debug('修正为标点 {}'.format(key))
                sql_logger.debug('JU RAN YOU user data {}'.format(__user_data))
                _r1 = list(filter(lambda x: key in x['qtalkname'], __user_data))  # jingyu.he
                _r2 = list(filter(lambda x: key in formulate_text(x['pinyin']), __user_data))
                _r3 = list(filter(lambda x: get_similar_bool(a=key, b=x['qtalkname']), __user_data))
                _r4 = list(filter(lambda x: get_similar_bool(a=key, b=x['pinyin']), __user_data))
                result = merge_list_of_dict(_r1, _r2, _r3, _r4)
            sql_logger.debug('user data for result {}'.format(result))
            sql_logger.debug('PUTTING INTO REDIS {}'.format(__user_data))
    # group search: match id, pinyin and title
    elif form == 'muc':
        # key = formulate_text_to_uid(key)
        _k = MUC_CACHE + '_' + user
        __muc_data = redis_cli.get(_k)
        if __muc_data:
            __muc_data = json.loads(__muc_data)
        elif habit[MUC_TRACE_KEY] or habit[MUC_KEY]:
            # muc_list = set(habit[MUC_TRACE_KEY] + habit[MUC_KEY])  # only ids here, no domain
            muc_list = habit[MUC_KEY] + list(filter(lambda x: x not in habit[MUC_KEY], habit[MUC_TRACE_KEY]))
            sql_logger.debug('WATCH ORDER {}'.format(muc_list))
            __muc_data = await self.muc_habit_data(data=muc_list, user=user)
            try:
                __muc_data = sorted(__muc_data, key=lambda x: muc_list.index(x.get('uri')))
            except ValueError:
                sql_logger.exception("ORDER PROBLEM : NOT IN LIST")
            sql_logger.debug('WATCH ORDER {}'.format(__muc_data))
            redis_cli.set(name=_k, value=json.dumps(__muc_data, ensure_ascii=False), ex=60)
        if __muc_data:
            sql_logger.debug('muc data {}'.format(__muc_data))
            __muc_list = [x.get('uri') for x in __muc_data]
            # pure Chinese key
            if not chinese_pattern.sub('', key):
                key = formulate_text(key)
                # __muc_data holds each group's members; depending on the
                # script of the key we can also check against user_data
                if common:
                    _r1 = list((filter(lambda x: key in x['label'], __muc_data)))
                    _r2 = await self.search_group(user_id=user, username=raw_key, limit=len(__muc_list), offset=0,
                                                  habit='', exclude=__muc_list, origin=origin, common=common,
                                                  from_habit=True)
                    _r2 = sorted(_r2, key=lambda x: __muc_list.index(x.get('uri')))
                    _r3 = list((filter(lambda x: get_similar_bool(a=raw_key, b=x['label']), __muc_data)))
                    result = merge_list_of_dict(_r1, _r2, _r3)
                else:
                    _r1 = list((filter(lambda x: key in x['label'], __muc_data)))
                    _r2 = list((filter(lambda x: get_similar_bool(a=raw_key, b=x['label']), __muc_data)))
                    result = merge_list_of_dict(_r1, _r2)
            elif chinese_pattern.findall(formulate_text(key)) and chinese_pattern.sub('', formulate_text(key)):
                key = formulate_text(key)
                for __d in __muc_data:
                    __d['label'] = formulate_text(__d['label'])
                _r1 = list(filter(lambda x: key in x['label'], __muc_data))
                sql_logger.debug('R1 {}'.format(_r1))
                # pinyin of the group name; to be removed later
                # take each label's [pinyin, initials], map key-containment, then reduce with OR
                _r2 = list(filter(lambda x: reduce(lambda a, b: a + b, list(
                    map(lambda x: True if key in x else False, pinyin.get_all(x['label'])))),
                                  __muc_data))
                _r3 = list(filter(lambda x: get_similar_bool(key, x['label']), __muc_data))
                chinese_words = chinese_pattern.findall(key)
                __k = list(map(lambda x: pinyin.get_pinyin(x), chinese_words))
                test = {f: t for f, t in zip(chinese_words, __k)}.items()
                for i in test:
                    key = key.replace(i[0], i[1])
                _r4 = list(filter(lambda x: reduce(lambda a, b: a + b, list(
                    map(lambda x: True if key in x else False, pinyin.get_all(x['label'])))),
                                  __muc_data))
                if common:
                    _r5 = await self.search_group(user_id=user, username=raw_key, limit=len(__muc_list), offset=0,
                                                  habit='', exclude=__muc_list, origin=origin, common=common,
                                                  from_habit=True)
                    result = merge_list_of_dict(_r1, _r2, _r3, _r4, _r5)
                else:
                    result = merge_list_of_dict(_r1, _r2, _r3, _r4)
            else:
                key = formulate_text(key)
                for __d in __muc_data:
                    __d['label'] = formulate_text(__d['label'])
                _r1 = list(filter(lambda x: key in x['label'], __muc_data))
                sql_logger.debug('R1 {}'.format(_r1))
                _r2 = list(filter(lambda x: key in x['uri'], __muc_data))
                sql_logger.debug('R2 {}'.format(_r2))
                # pinyin of the group name; to be removed later
                # take each label's [pinyin, initials], map key-containment, then reduce with OR
                _r3 = list(filter(lambda x: reduce(lambda a, b: a + b, list(
                    map(lambda x: True if key in x else False, pinyin.get_all(x['label'])))),
                                  __muc_data))
                _r4 = list(filter(lambda x: get_similar_bool(key, x['label']), __muc_data))
                if common:
                    _r5 = await self.search_group(user_id=user, username=raw_key, limit=len(__muc_list), offset=0,
                                                  habit='', exclude=__muc_list,
                                                  from_habit=True)
                    result = merge_list_of_dict(_r1, _r2, _r3, _r4, _r5)
                else:
                    result = merge_list_of_dict(_r1, _r2, _r3, _r4)
                sql_logger.debug(
                    'PINYIN {}'.format([pinyin.get_all(x['label']) for x in __muc_data]))
                sql_logger.debug('R3 {}'.format(_r3))
    # self.close()
    sql_logger.debug('returning result {}'.format(list(result)))
    return list(result)
async def single_habit_data(self, data, user_domain):
    """Fetch profile cards for the given habitual contacts.

    :param data: iterable of user ids or jids; any '@domain' suffix is stripped.
    :param user_domain: host the users must belong to; also used to rebuild
        the full jid in the returned 'uri'.
    :return: list of dicts with qtalkname/uri/content/icon/name/label/pinyin.
    """
    s_result = list()
    s_data = list(map(lambda x: x.split('@')[0], data))
    sql = """SELECT aa.user_id, aa.department, aa.icon, aa.user_name, aa.mood, aa.pinyin FROM ( SELECT a.user_id, a.department, b.url AS icon, a.user_name, b.mood, a.pinyin FROM host_users a LEFT JOIN vcard_version b ON a.user_id = b.username WHERE a.hire_flag = 1 AND LOWER(a.user_type) != 's' AND a.user_id = ANY($1) and a.host_id = ANY(select id from host_info where host = $2 )) aa """
    pgconn = await asyncpg.connect(self.conn_str)
    stmt = await pgconn.prepare(sql)
    for (user_id, department, icon, user_name, mood, __pinyin) in await stmt.fetch(s_data, user_domain):
        res = dict()
        row = [user_id, department, icon, user_name, mood, __pinyin]
        row = ['' if x is None else x for x in row]
        res['qtalkname'] = row[0]
        # BUGFIX: previously rebuilt with the module-level ``domain``; the
        # rows are filtered by host = user_domain above, so the jid must be
        # rebuilt with the same domain or cross-domain lookups break.
        res['uri'] = row[0] + '@' + user_domain
        res['content'] = row[1]
        res['icon'] = row[2]
        res['name'] = row[3]
        res['label'] = row[3] + '(' + row[0] + ')'
        if row[4]:
            # Append the mood/status line when present.
            res['label'] = res['label'] + ' - ' + row[4]
        res['pinyin'] = row[5]
        s_result.append(res)
    await pgconn.close()
    sql_logger.debug('SINGLE HABIT {}'.format(s_result))
    return s_result
async def muc_habit_data(self, data, user):
    """Fetch group cards for the searcher's habitual group chats.

    :param data: iterable of muc names or jids; domain suffixes are stripped.
    :param user: full jid of the searcher; must contain '@'.
    :return: list of dicts with uri/label/content/icon/pinyin.
    """
    if '@' not in user:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    parts = user.split('@')
    owner, owner_host = parts[0], parts[1]
    # Kept for parity with the original flow; not used below.
    muc_domain = conference_str if isinstance(conference_str, str) else 'conference.' + owner_host
    cards = list()
    bare_mucs = [m.split('@')[0] for m in data]
    # bare_mucs = ','.join(list(map(lambda x: x + '@conference.' + domain, data)))
    sql = """select a.muc_name, a.domain, b.show_name, b.muc_title, b.muc_pic, b.show_name_pinyin from user_register_mucs as a left join muc_vcard_info as b on concat(a.muc_name, '@', a.domain) = b.muc_name where a.registed_flag != 0 and a.username = $2 and a.host = $3 and a.muc_name = ANY($1)"""
    # The commented query below also returns the group member list:
    # sql = """SELECT a.muc,a.user_list, b.show_name, b.muc_title, b.muc_pic, b.show_name_pinyin as pinyin FROM (SELECT (muc_name || '@' || domain )as muc, array_agg(username) as user_list FROM user_register_mucs WHERE registed_flag != 0 AND muc_name = ANY($1) GROUP BY muc_name ||'@'|| domain )a LEFT JOIN muc_vcard_info b on a.muc = b.muc_name"""
    pgconn = await asyncpg.connect(self.conn_str)
    stmt = await pgconn.prepare(sql)
    for record in await stmt.fetch(bare_mucs, owner, owner_host):
        name, s_domain, show_name, title, pic, py = ('' if v is None else v for v in record)
        cards.append({
            'uri': name + '@' + s_domain,
            'label': show_name,
            'content': title,
            'icon': pic,
            'pinyin': py,
        })
    await pgconn.close()
    sql_logger.debug('MUC HABIT {}'.format(cards))
    return cards
async def search_user(self, username, user_id, limit=5, offset=0, habit='', exclude=None):
    """Search users by id / name / pinyin / custom nick, habit-aware.

    :param username: search term; a leading REGEX_TAG switches matching
        from ilike ('%term%') to a PostgreSQL regex ('~').
    :param user_id: full jid of the searcher.
    :param limit/offset: paging.
    :param habit: habit dict used for re-ranking when if_cached is set.
    :param exclude: cards already shown; subtracted from offset and id set.
    :return: list of user cards (qtalkname/uri/content/icon/name/label/pinyin).
    """
    s_result = list()
    exclude_list = []
    if '@' in user_id:
        user_s_name = user_id.split('@')[0]
        user_domain = user_id.split('@')[1]
    else:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    if exclude:
        exclude_list = {'{}'.format(x.get('qtalkname')) for x in exclude}
        offset = offset - len(exclude)
        if offset < 0:
            offset = 0
    regex_tag = username.startswith(REGEX_TAG)
    if regex_tag:
        search_model = '~'
        username = username[1:]
    else:
        search_model = 'ilike'
        username = '%{}%'.format(username)
    if if_cached:
        # Cached variant: also matches the searcher's kMarkupNames nicknames
        # and removes already-shown ids ($4) directly in SQL.
        sql = """SELECT aa.user_id, aa.department, bb.url as icon, CASE WHEN aa.nick != '' THEN aa.nick ELSE aa.user_name END, bb.mood , aa.pinyin
FROM (
SELECT a.user_id, b.department, b.user_name, b.pinyin, a.nick
FROM (
SELECT uu.user_id || '@' || hh.host as user_id,'' as nick, uu.host_id as hostid
FROM host_users uu
LEFT JOIN host_info hh
ON uu.host_id = hh.id
WHERE uu.hire_flag = 1 AND LOWER(uu.user_type) != 's' AND uu.user_id <> ALL($4) AND (uu.user_id ILIKE $1 OR uu.user_name {search_model} $1 OR uu.pinyin ILIKE $1 ) AND uu.host_id = ANY(select id from host_info where host = $6 )
UNION
SELECT cc.subkey AS user_id, cc.configinfo as nick, hh.id as hostid
FROM client_config_sync cc
LEFT JOIN host_info hh
ON cc.host = hh.host
WHERE split_part(cc.subkey,'@',1) <> ALL($4) AND cc.username = $5 AND cc.configkey = 'kMarkupNames' AND cc.configinfo {search_model} $1 AND cc.host = $6
) a
LEFT JOIN host_users b
ON split_part(a.user_id,'@',1) = b.user_id AND a.hostid = b.host_id
) aa
LEFT JOIN vcard_version bb
ON aa.user_id = bb.username || '@' || bb.host
ORDER BY aa.user_id ASC LIMIT $2 OFFSET $3"""
        sql = sql.format(search_model=search_model)
        injection = [username, limit, offset, exclude_list, user_s_name, user_domain]
    else:
        # Uncached variant: rank by most recent conversation with the searcher.
        sql = """SELECT aa.user_id, aa.department, bb.url as icon, CASE WHEN aa.nick != '' THEN aa.nick ELSE aa.user_name END, bb.mood , aa.pinyin
FROM
(
SELECT a.user_id, b.department, b.user_name, b.pinyin, a.nick
FROM (
SELECT uu.user_id || '@' || hh.host as user_id,'' as nick, uu.host_id as hostid
FROM host_users uu
LEFT JOIN host_info hh
ON uu.host_id = hh.id
WHERE uu.hire_flag = 1 AND LOWER(uu.user_type) != 's' AND
( uu.user_id ILIKE $1 OR uu.user_name {search_model} $1 OR uu.pinyin ILIKE $1 ) AND uu.host_id = ANY(select id from host_info where host = $5 )
UNION
SELECT cc.subkey AS user_id, cc.configinfo as nick, hh.id as hostid
FROM client_config_sync cc
LEFT JOIN host_info hh
ON cc.host = hh.host
WHERE cc.username = $4 AND cc.configkey = 'kMarkupNames' AND cc.configinfo {search_model} $1 AND cc.host = $5
) a
LEFT JOIN host_users b
ON split_part(a.user_id, '@', 1) = b.user_id AND a.hostid = b.host_id
) aa
LEFT JOIN vcard_version bb
ON aa.user_id = bb.username || '@' || bb.host
LEFT JOIN
(
SELECT CASE WHEN m_from || '@' || from_host = $6 THEN m_to || '@' || to_host ELSE m_from || '@' || from_host END AS contact, max(create_time) mx
FROM msg_history
WHERE (m_from = $4 and from_host = $5 ) or (m_to = $4 and to_host = $5 )
GROUP BY contact
) cc
ON aa.user_id = cc.contact
ORDER BY cc.mx DESC nulls last
LIMIT $2
OFFSET $3"""
        sql = sql.format(search_model=search_model)
        injection = [username, limit, offset, user_s_name, user_domain, user_id]
    pgconn = await asyncpg.connect(self.conn_str)
    stmt = await pgconn.prepare(sql)
    for (user_id, department, icon, user_name, mood, __pinyin) in await stmt.fetch(*injection):
        res = dict()
        row = [user_id, department, icon, user_name, mood, __pinyin]
        row = ['' if x is None else x for x in row]
        res['qtalkname'] = row[0].split('@')[0]
        res['uri'] = row[0]
        res['content'] = row[1]
        res['icon'] = row[2]
        res['name'] = row[3]
        res['label'] = row[3] + '(' + row[0] + ')'
        if row[4]:
            res['label'] = res['label'] + ' - ' + row[4]
        res['pinyin'] = row[5]
        s_result.append(res)
    if if_cached and habit:
        sql_logger.debug('BEFORE HABIT REARRANGE {}\n HABIT {}'.format(s_result, habit))
        s_result = self.sort_by_habit(data=s_result, habit=habit[SINGLE_KEY], name_key='qtalkname',
                                      search_key=username)
        sql_logger.debug('AFTER HABIT REARRANGE {}'.format(s_result))
    elif if_cached and not habit:
        # BUGFIX: the format string previously read '{username}}' which
        # raised no error but logged a stray brace.
        sql_logger.error("CACHED BUT NO HABIT, userid : {user_id}, username : {username}".format(user_id=user_id,
                                                                                                username=username))
    # Put the exact match first; TODO: should come from user_data later,
    # together with a cache-refresh mechanism.
    # NOTE(review): for non-regex searches ``username`` still carries the
    # %...% wildcards here, so the exact-uri comparison can only fire for
    # regex queries — consider comparing against the raw term instead.
    if '.' in username and s_result:
        username = username + '@' + user_domain
        tag = False
        # BUGFIX: the old code did ``s_result.pop(x)`` with the dict itself
        # (list.pop needs an index → TypeError) while mutating the list it
        # was iterating; promote by index and stop at the first match.
        for idx, entry in enumerate(s_result):
            if username == entry.get('uri'):
                s_result.insert(0, s_result.pop(idx))
                tag = True
                break
        if not tag and self.user_data:
            __complete_match = self.user_data.get(username)
            if __complete_match:
                res = dict()
                res['qtalkname'] = __complete_match['i'].split('@')[0]
                res['uri'] = __complete_match['i']
                res['content'] = __complete_match['d']
                res['icon'] = __complete_match['u']
                res['name'] = __complete_match['n']
                res['label'] = __complete_match['n'] + '(' + __complete_match['i'] + ')'
                if __complete_match['m']:
                    res['label'] = res['label'] + ' - ' + __complete_match['m']
                res['pinyin'] = __complete_match['p']
                s_result = [res] + s_result
    await pgconn.close()
    sql_logger.debug('SINGLE RESULT {}'.format(s_result))
    return s_result
async def search_group(self, user_id, username, limit=5, offset=0, habit='', exclude=None, origin=True,
                       common=True, from_habit=False):
    """Search group chats by member tokens and/or group name.

    Splits *username* into per-member tokens (``common``), builds one of
    the make_common_sql variants, runs it through asyncpg and classifies
    each hit with a 'todoType' (6 = name+member hit, 4 = member-only,
    2 = name-only).  Habit data re-ranks the final list when cached.

    :param user_id: full jid of the searcher.
    :param username: search term (REGEX_TAG prefix switches to regex).
    :param from_habit: True when called from get_habit with a fixed group
        set in *exclude*; changes both SQL variant and parameter layout.
    :return: list of group cards (uri/label/content/icon/hit/todoType).
    """
    # TODO: this is ugly; clean up when time permits
    if '@' in user_id:
        user_s_name = user_id.split('@')[0]
        user_domain = user_id.split('@')[1]
    else:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    regex_tag = username.startswith(REGEX_TAG)
    if regex_tag:
        search_model = '~'
        username = username[1:]
    else:
        search_model = 'ilike'
        username = '%{}%'.format(username)
    # NOTE(review): raw_key is taken after wildcard-wrapping, so it still
    # contains the %...% characters for non-regex searches.
    raw_key = username.strip()
    __start_time = time.time()
    s_result = list()
    key = None
    if not exclude:
        exclude = []
    if common:
        key = username
        key = key.split()
        _key_list = []
        for _k in key:
            # pure-Chinese token: keep when at least 2 chars
            if not chinese_pattern.sub('', _k):
                if len(_k) >= 2:
                    if not regex_tag:
                        _k = '%{}%'.format(_k)
                    _key_list.append(_k)
            else:
                # ASCII/mixed token: keep when longer than 3 chars
                if len(_k) > 3:
                    if not regex_tag:
                        _k = '%{}%'.format(_k)
                    _key_list.append(_k)
        # key = list(filter(lambda x: len(x) >= 2, key))
        key = _key_list
        if key:
            if user_s_name in key:
                # NOTE(review): the guard checks ``user_s_name`` but removes
                # ``user_id`` — if only the bare name is present this raises
                # ValueError; list.remove() also always returns None, so
                # ``common`` is unconditionally set False here. Verify intent.
                if not key.remove(user_id):
                    common = False
                    # return None
        else:
            common = False
    # decide whether hit highlights come back as display names or ids;
    # TODO: should be selected directly in the SQL
    if common:
        if not chinese_pattern.findall(''.join(key)):
            ret_user_name = False
        else:
            ret_user_name = True
    if not exclude:
        exclude = set()
    offset = offset - len(exclude)
    if offset < 0:
        offset = 0
    if from_habit:
        # only searching members inside the cached habit groups here; when
        # key is not a member lookup, return empty immediately
        if key:
            sql = self.make_common_sql(keys=key, origin=False, common=common, habit_tag=True)
        else:
            return []
        exclude_list = list(map(lambda x: x.split('@')[0], exclude))
    else:
        sql = self.make_common_sql(keys=key, origin=origin, common=common)
        exclude_list = {'{}'.format(x.get('uri', '')) for x in exclude}
        exclude_list = list(map(lambda x: x.split('@')[0], exclude_list))
    sql = sql.format(search_model=search_model)
    # Parameter layout must match the positional scheme documented in
    # make_common_sql.
    if from_habit:
        injection = [*key, user_id, offset, limit, exclude_list, user_domain]
    elif common and origin:
        injection = [*key, user_s_name, offset, limit, exclude_list, username, user_domain]
    elif common and not origin:
        injection = [*key, user_s_name, offset, limit, exclude_list, user_domain]
    elif not common and origin:
        injection = [user_s_name, username, limit, offset, exclude_list, user_domain]
    else:
        return []
    pgconn = await asyncpg.connect(self.conn_str)
    stmt = await pgconn.prepare(sql)
    for (muc_name, s_domain, show_name, muc_title, muc_pic, users) in await stmt.fetch(*injection):
        row = [muc_name, s_domain, show_name, muc_title, muc_pic, users]
        row = ['' if x is None else x for x in row]
        res = dict()
        # res['uri'] = row[0] + '@' + row[1]
        res['uri'] = row[0]
        res['label'] = row[2]
        res['content'] = row[3]
        res['icon'] = row[4]
        __hits = []
        from_common = False
        from_name = False
        # row[5] holds the member hits: nested arrays of '|'-joined jids;
        # array[''] marks a group matched by its name instead of members.
        if row[5]:
            if isinstance(row[5], list):
                for i in row[5]:
                    if i == ['']:
                        from_name = True
                        continue
                    if not i:
                        continue
                    if isinstance(i, str):
                        if '|' in i:
                            __hits.extend(i.split('|'))
                        else:
                            __hits.append(i)
                    elif isinstance(i, list):
                        for u in i:
                            if isinstance(u, str):
                                if '|' in u:
                                    __hits.extend(u.split('|'))
                                else:
                                    __hits.append(u)
                            elif isinstance(u, list):
                                for k in u:
                                    if '|' in k:
                                        __hits.extend(k.split('|'))
                                    else:
                                        __hits.append(k)
                            else:
                                raise TypeError("WRONG COMMON MEMBER HITS {}".format(row[5]))
            # a plain str should not occur; even if it does it will usually
            # fail the length check below
            elif isinstance(row[5], str):
                if '|' in row[5]:
                    __hits.extend(row[5].split('|'))
                else:
                    __hits.append(row[5])
        else:
            from_name = True
        if __hits and len(__hits) >= len(key):
            from_common = True
            res['hit'] = __hits
        elif __hits and len(__hits) < len(key):
            from_common = False
            if not from_name:
                continue
        if from_common and from_name:
            res['todoType'] = 6
        elif from_common and not from_name:
            res['todoType'] = 4
        elif not from_common and from_name:
            res['todoType'] = 2
        s_result.append(res)
    await pgconn.close()
    if not from_habit:
        if if_cached and habit:
            _habit = list(map(lambda x: x + '@conference.' + domain, habit[MUC_KEY]))
            sql_logger.debug('BEFORE HABIT REARRANGE {}\n HABIT {}'.format(s_result, habit))
            s_result = self.sort_by_habit(data=s_result, habit=_habit, name_key='uri')
            sql_logger.debug('AFTER HABIT REARRANGE {}'.format(s_result))
            # s_result = self.sort_by_habit(data=s_result, habit=habit[MUC_KEY], name_key='uri')
        elif if_cached and not habit:
            # NOTE(review): '{username}}' has a stray closing brace — the log
            # line ends with a literal '}'.
            sql_logger.error(
                "CACHED BUT NO HABIT, userid : {user_id}, username : {username}}".format(user_id=user_id,
                                                                                         username=username))
    else:
        _habit = list(map(lambda x: x + '@conference.' + domain, exclude_list))
        # s_result = sorted(s_result, key=lambda x: [x for x in exclude_list].index(x))
        sql_logger.debug('BEFORE HABIT REARRANGE {}\n HABIT {}'.format(s_result, habit))
        s_result = self.sort_by_habit(data=s_result, habit=_habit, name_key='uri')
        sql_logger.debug('AFTER HABIT REARRANGE {}'.format(s_result))
    # Map member-jid hits back to display names for Chinese searches.
    if common and ret_user_name and self.user_data:
        for _r in s_result:
            hits = _r.get('hit', '')
            if not hits:
                break
            if isinstance(hits, list):
                # name = [x.get('n') for x in user_data if x.get('i') in hits]
                # name = [user_data.get(x.split('@')[0]).get('n', '') for x in hits]
                name = [self.user_data.get(x, {}).get('n', '') for x in hits]
            else:
                # name = [x.get('n') for x in user_data if x.get('i') == hits]
                name = [self.user_data.get(x, {}).get('n', '') for x in hits]
            _r['hit'] = name
    sql_logger.debug('GROUP RESULT {}'.format(s_result))
    __end_time = time.time()
    sql_logger.info("SEARCH GROUP USED {}".format(__end_time - __start_time))
    return s_result
async def search_group_by_single(self, user_id, key, limit=5, offset=0, habit='', exclude=None):
    """Find groups whose members match every token in *key*.

    :param user_id: full jid of the searcher; required ('@' must be present).
    :param key: whitespace-separated member tokens; tokens of length <= 2
        are dropped, and the searcher's own id is removed from the set.
    :return: list of group cards, or None/[] when no usable tokens remain.
    """
    # BUGFIX: the old guard read an undefined name ``user``; the parameter
    # is ``user_id``.
    if '@' not in user_id:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    # BUGFIX: the module-level ``conference_str`` was previously assigned
    # inside this function, which made the name local and raised
    # UnboundLocalError on the isinstance() check; use a local fallback.
    if isinstance(conference_str, str):
        conf_domain = conference_str
    else:
        conf_domain = 'conference.' + user_id.split('@')[1]
    if not exclude:
        exclude = []
    key = key.split()
    key = list(filter(lambda x: len(x) > 2, key))
    if key:
        if user_id in key:
            # list.remove() returns None, so this always returns None when
            # the searcher's own id was the only token left.
            if not key.remove(user_id):
                return None
    else:
        return None
    # decide whether hit highlights come back as display names or ids;
    # TODO: should be selected directly in the SQL
    if not chinese_pattern.findall(''.join(key)):
        ret_user_name = False
    else:
        ret_user_name = True
    key_count = len(key)
    s_result = list()
    if if_cached:
        # sql = """SELECT A.muc_name, A.domain, B.show_name, B.muc_title, B.muc_pic FROM ( SELECT muc_name, domain FROM user_register_mucs WHERE username = $1 AND registed_flag != 0 AND muc_name IN ( SELECT muc_name FROM user_register_mucs WHERE username IN ( SELECT user_id FROM host_users WHERE hire_flag = 1 AND (user_id ~ ANY($2) OR user_name ~ ANY($2) OR pinyin ~ ANY($2))) GROUP BY muc_name HAVING COUNT(*) = $3 )) A JOIN muc_vcard_info B ON (A.muc_name || $6) = b.muc_name LIMIT $4 OFFSET $5"""
        # NOTE(review): make_common_sql returns a template with pyformat
        # placeholders and an unfilled {search_model}; verify this branch
        # is actually exercised / converted before prepare().
        sql = self.make_common_sql(key)
        exclude_list = {'{}'.format(x.get('uri', '')) for x in exclude}
        injection = [*key, user_id, '@' + conf_domain, offset, limit, exclude_list]
    else:
        sql = """SELECT A.muc_room_name, split_part(b.muc_name, '@', 2) as domain, B.show_name, B.muc_title, B.muc_pic FROM (SELECT muc_room_name, MAX(create_time) as max FROM muc_room_history aa RIGHT JOIN (SELECT muc_name FROM user_register_mucs WHERE username = $1 AND registed_flag != 0 AND muc_name in (SELECT muc_name FROM user_register_mucs WHERE username IN (SELECT user_id FROM host_users WHERE hire_flag = 1 AND (user_id ~ any($2) OR user_name ~ any($2) OR pinyin ~ any($2))) GROUP BY muc_name HAVING COUNT(*) = $3)) bb ON aa.muc_room_name = bb.muc_name GROUP BY muc_room_name ORDER BY max DESC nulls last LIMIT $4 OFFSET $5) A JOIN muc_vcard_info B ON (a.muc_room_name || $6) = b.muc_name"""
        injection = [user_id, key, key_count, limit, offset, '@' + conf_domain]
    pgconn = await asyncpg.connect(self.conn_str)
    stmt = await pgconn.prepare(sql)
    for (muc_name, s_domain, show_name, muc_title, muc_pic, users) in await stmt.fetch(*injection):
        row = [muc_name, s_domain, show_name, muc_title, muc_pic, users]
        row = ['' if x is None else x for x in row]
        res = dict()
        res['uri'] = row[0] + '@' + row[1]
        res['label'] = row[2]
        res['content'] = row[3]
        res['icon'] = row[4]
        res['hit'] = row[5] if isinstance(row[5], list) else [row[5]]
        s_result.append(res)
    await pgconn.close()
    # Map member-jid hits back to display names for Chinese searches.
    if ret_user_name and user_data:
        for _r in s_result:
            hits = _r.get('hit', '')
            if isinstance(hits, list):
                # Guard unknown ids with a default dict (consistent with
                # search_group) instead of letting .get(x) return None.
                name = [user_data.get(x, {}).get('n', '') for x in hits]
            else:
                name = [user_data.get(x, {}).get('n', '') for x in hits]
            _r['hit'] = name
    if if_cached and habit:
        _habit = list(map(lambda x: x + '@conference.' + domain, habit[MUC_KEY]))
        sql_logger.debug('BEFORE HABIT REARRANGE {}\n HABIT {}'.format(s_result, habit))
        s_result = self.sort_by_habit(data=s_result, habit=_habit, name_key='uri')
        sql_logger.debug('AFTER HABIT REARRANGE {}'.format(s_result))
    elif if_cached and not habit:
        # BUGFIX: stray closing brace removed from the format string.
        sql_logger.error("CACHED BUT NO HABIT, userid : {user_id}, username : {username}".format(user_id=user_id,
                                                                                                 username=key))
    sql_logger.debug('COMMON RESULT {}'.format(s_result))
    return s_result
async def history_user(self, user_id, term, offset, limit, to_user=None, time_range=None, agg_tag=False):
    """Search single-chat message history for *user_id*.

    :param user_id: full JID ("name@domain") of the searching user.
    :param term: search text; a leading REGEX_TAG switches to regex
        matching, otherwise a case-insensitive ILIKE substring match.
    :param offset: SQL OFFSET for paging.
    :param limit: SQL LIMIT for paging.
    :param to_user: optional peer restriction -- a single JID string or a
        list of JIDs.
    :param time_range: optional [start, end]; either bound may be falsy to
        leave that side open.
    :param agg_tag: when True (or when no to_user is given) return one
        aggregated row per conversation (hit count + latest message)
        instead of individual messages.
    :return: list of result dicts ([] on bad input).
    """
    s_result = list()
    if '@' not in user_id:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    user_s_name = user_id.split('@')[0]
    user_domain = user_id.split('@')[1]
    if term.startswith(REGEX_TAG):
        search_model = '~'
        term = term[1:]
    else:
        search_model = 'ilike'
        term = '%{}%'.format(term)
    if not agg_tag and to_user:
        # Per-message search restricted to one peer (or a set of peers).
        sql = """SELECT create_time as date, m_from, from_host as fromhost, realfrom, m_to , to_host as tohost, realto as realto, m_body as msg, msg_id
        FROM msg_history
        WHERE xpath('/message/body/text()',m_body::xml)::text {search_model} $1 {user_limit} {time_limit_start} {time_limit_end}
        ORDER BY create_time DESC
        OFFSET $3
        LIMIT $2"""
        injection = [term, limit, offset, user_s_name, user_domain]
        if isinstance(to_user, list):
            # NOTE: the previous version of this clause was missing two
            # closing parentheses and could never be prepared.
            user_limit = """AND (
            (m_from = $4 and from_host = $5 and m_to || '@' || to_host = ANY($6))
            OR
            (m_to = $4 and to_host = $5 and m_from || '@' || from_host = ANY($6))
            )"""
            injection += [to_user]
        elif isinstance(to_user, str):
            to_user_s_name = to_user.split('@')[0]
            to_user_domain = to_user.split('@')[1]
            user_limit = """AND (
            (m_from = $4 and from_host = $5 and m_to = $6 and to_host = $7 )
            OR
            (m_to = $4 and to_host = $5 and m_from = $6 and from_host = $7 )
            )"""
            injection += [to_user_s_name, to_user_domain]
        else:
            user_limit = ''
        time_limit_start = ''
        time_limit_end = ''
        if time_range and isinstance(time_range, list):
            # Time placeholders are numbered after whatever is already bound.
            first_index = len(injection) + 1
            second_index = len(injection) + 2
            if time_range[0] and time_range[1]:
                time_limit_start = "AND create_time > ${} AND create_time < ${}".format(first_index, second_index)
                injection += time_range
            elif time_range[0]:
                time_limit_start = "AND create_time > ${}".format(first_index)
                injection += [time_range[0]]
            elif time_range[1]:
                time_limit_end = "AND create_time < ${}".format(first_index)
                injection += [time_range[1]]
        sql = sql.format(user_limit=user_limit, time_limit_start=time_limit_start,
                         time_limit_end=time_limit_end, search_model=search_model)
        pgconn = await asyncpg.connect(self.conn_str)
        try:
            stmt = await pgconn.prepare(sql)
            for (date, m_from, fromhost, realfrom, m_to, tohost, realto, msg,
                 msg_id) in await stmt.fetch(*injection):
                row = ['' if x is None else x for x in
                       (date, m_from, fromhost, realfrom, m_to, tohost, realto, msg, msg_id)]
                res = dict()
                res['date'] = row[0].strftime('%Y-%m-%d %H:%M:%S') if row[0] else ''
                res['from'] = row[1] + '@' + row[2]
                res['realfrom'] = row[3] if row[3] else res['from']
                res['to'] = row[4] + '@' + row[5]
                res['realto'] = row[6] if row[6] else res['to']
                res['msg'] = row[7]
                res['msgid'] = row[8]
                s_result.append(res)
        finally:
            # Always release the connection, even if prepare/fetch fails.
            await pgconn.close()
    else:
        # Aggregated search: one row per conversation with the hit count and
        # the most recent matching message.
        # NOTE: the time filters live inside the sub-query, so they must not
        # carry the outer alias "b.", and they sit OUTSIDE the from/to
        # parentheses so they combine with AND (the old placement made them
        # OR-swallowed by operator precedence).
        sql = """SELECT a.count, b.create_time as date, b.m_from, b.from_host as fromhost, b.realfrom, b.m_to, b.to_host as tohost, b.realto, b.m_body as msg, a.conversation, b.msg_id, a.id FROM
        (
        SELECT count(1) as count, MAX(id) as id, m_from||'@'||from_host || '_' || m_to||'@'||to_host as conversation
        FROM msg_history
        WHERE xpath('/message/body/text()',m_body::xml)::text {search_model} $1 AND ( (m_from = $4 and from_host = $5 ) or (m_to = $4 and to_host = $5) ) {time_limit_start} {time_limit_end}
        GROUP BY m_from||'@'||from_host || '_' || m_to||'@'||to_host
        ORDER BY id desc
        OFFSET $3
        LIMIT $2
        ) a
        LEFT JOIN msg_history b
        ON a.id = b.id"""
        injection = [term, limit, offset, user_s_name, user_domain]
        time_limit_start = ''
        time_limit_end = ''
        if time_range and isinstance(time_range, list):
            if time_range[0] and time_range[1]:
                time_limit_start = "AND create_time > $6 AND create_time < $7 "
                injection += time_range
            elif time_range[0]:
                time_limit_start = "AND create_time > $6"
                # Wrap the single value in a list: "+= value" would extend
                # the argument list element-wise (the old bug).
                injection += [time_range[0]]
            elif time_range[1]:
                time_limit_end = "AND create_time < $6"
                injection += [time_range[1]]
        sql = sql.format(time_limit_start=time_limit_start, time_limit_end=time_limit_end,
                         search_model=search_model)
        pgconn = await asyncpg.connect(self.conn_str)
        try:
            stmt = await pgconn.prepare(sql)
            for (count, date, m_from, fromhost, realfrom, m_to, tohost, realto, msg,
                 conversation, msg_id, _id) in await stmt.fetch(*injection):
                row = ['' if x is None else x for x in
                       (count, date, m_from, fromhost, realfrom, m_to, tohost, realto,
                        msg, conversation, msg_id, _id)]
                res = dict()
                res['count'] = row[0]
                res['date'] = row[1].strftime('%Y-%m-%d %H:%M:%S') if row[1] else ''
                res['from'] = row[2] + '@' + row[3]
                res['realfrom'] = row[4] if row[4] else res['from']
                res['to'] = row[5] + '@' + row[6]
                res['realto'] = row[7] if row[7] else res['to']
                res['msg'] = row[8]
                res['conversation'] = row[9]
                res['msgid'] = row[10]
                res['id'] = row[11]
                s_result.append(res)
            # Fold the two directions of each conversation together.
            s_result = self.handle_sql_result(data=s_result)
        finally:
            await pgconn.close()
    return s_result
async def history_muc(self, user_id, term, offset, limit, to_muc=None, time_range=None, agg_tag=False):
    """Search group-chat (MUC) message history for *user_id*.

    :param user_id: full JID ("name@domain") of the searching user.
    :param term: search text; leading REGEX_TAG => regex, else ILIKE substring.
    :param offset: SQL OFFSET.
    :param limit: SQL LIMIT.
    :param to_muc: optional room restriction -- a room JID string or a list
        of room JIDs (only the node part before '@' is used).
    :param time_range: optional [start, end]; either bound may be falsy.
    :param agg_tag: when True (or when no to_muc is given) return one
        aggregated row per room instead of individual messages.
    :return: list of result dicts ([] on bad input).
    """
    s_result = list()
    if '@' not in user_id:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        # Return an empty list (not None) so callers can iterate safely,
        # matching history_user / history_file.
        return []
    user_s_name = user_id.split('@')[0]
    user_domain = user_id.split('@')[1]
    if term.startswith(REGEX_TAG):
        search_model = '~'
        term = term[1:]
    else:
        search_model = 'ilike'
        term = '%{}%'.format(term)
    if not agg_tag and to_muc:
        sql = """SELECT a.create_time as date, b.muc_name as _to, a.packet as msg, a.msg_id as msgid, b.show_name as label, b.muc_pic as icon
        FROM muc_room_history a left join muc_vcard_info b
        on a.muc_room_name = split_part(b.muc_name,'@',1)
        WHERE xpath('/message/body/text()',packet::xml)::text {search_model} $1 {muc_limit} {time_limit_start} {time_limit_end}
        ORDER BY create_time DESC
        OFFSET $3
        LIMIT $2"""
        injection = [term, limit, offset, user_s_name]
        time_limit_start = ''
        time_limit_end = ''
        if isinstance(to_muc, list):
            # Strip the domain part; muc_room_history stores bare room names.
            to_muc = list(map(lambda x: x.split('@')[0], to_muc))
            muc_limit = "AND muc_room_name = ANY(SELECT muc_name FROM user_register_mucs WHERE username = $4 AND muc_name = ANY($5) )"
            injection += [to_muc]
        elif isinstance(to_muc, str):
            to_muc = to_muc.split('@')[0]
            muc_limit = "AND muc_room_name = ANY(SELECT muc_name FROM user_register_mucs WHERE username = $4 AND muc_name = $5)"
            injection += [to_muc]
        else:
            return []
        if time_range and isinstance(time_range, list):
            if time_range[0] and time_range[1]:
                time_limit_start = "AND create_time > $6"
                time_limit_end = "AND create_time < $7"
                injection += time_range
            elif time_range[0]:
                time_limit_start = "AND create_time > $6"
                injection += [time_range[0]]
            elif time_range[1]:
                time_limit_end = "AND create_time < $6"
                injection += [time_range[1]]
        sql = sql.format(muc_limit=muc_limit, time_limit_start=time_limit_start,
                         time_limit_end=time_limit_end, search_model=search_model)
        pgconn = await asyncpg.connect(self.conn_str)
        try:
            stmt = await pgconn.prepare(sql)
            for (date, _to, msg, msgid, label, icon) in await stmt.fetch(*injection):
                row = ['' if x is None else x for x in (date, _to, msg, msgid, label, icon)]
                res = dict()
                res['date'] = row[0].strftime('%Y-%m-%d %H:%M:%S') if row[0] else ''
                res['to'] = row[1]
                res['msg'] = row[2]
                res['msgid'] = row[3]
                res['from'] = ''
                res['label'] = row[4]
                res['icon'] = row[5]
                s_result.append(res)
        finally:
            await pgconn.close()
    else:
        # Aggregated: one row per room with the hit count and latest message.
        sql = """SELECT count, c.muc_name, b.msg_id, b.create_time as date, b.packet, c.show_name as label, c.muc_pic as icon , a.id FROM
        (
        SELECT count(1) as count, MAX(id) as id, muc_room_name
        FROM muc_room_history
        WHERE xpath('/message/body/text()',packet::xml)::text {search_model} $1 AND muc_room_name = ANY(SELECT muc_name FROM user_register_mucs where username = $4 and registed_flag = 1 AND host = $5 and domain = 'conference.' || $5 ) {time_limit_start} {time_limit_end}
        GROUP BY muc_room_name
        ORDER BY id desc
        OFFSET $3
        LIMIT $2
        )a
        LEFT JOIN muc_room_history b
        ON a.id = b.id
        LEFT JOIN muc_vcard_info c
        on a.muc_room_name = split_part(c.muc_name,'@',1)"""
        injection = [term, limit, offset, user_s_name, user_domain]
        time_limit_start = ''
        time_limit_end = ''
        if time_range and isinstance(time_range, list):
            # $1-$5 are already bound, so the time bounds are $6/$7 (the old
            # code numbered them $7/$8 and never bound the end-only value,
            # so every time-limited query failed at prepare time).
            if time_range[0] and time_range[1]:
                time_limit_start = "AND create_time > $6"
                time_limit_end = "AND create_time < $7"
                injection += time_range
            elif time_range[0]:
                time_limit_start = "AND create_time > $6"
                injection += [time_range[0]]
            elif time_range[1]:
                time_limit_end = "AND create_time < $6"
                injection += [time_range[1]]
        sql = sql.format(time_limit_start=time_limit_start, time_limit_end=time_limit_end,
                         search_model=search_model)
        pgconn = await asyncpg.connect(self.conn_str)
        try:
            stmt = await pgconn.prepare(sql)
            for (count, muc_room_name, msg_id, date, packet, label, icon,
                 __id) in await stmt.fetch(*injection):
                row = ['' if x is None else x for x in
                       (count, muc_room_name, msg_id, date, packet, label, icon, __id)]
                res = dict()
                res['count'] = row[0]
                res['to'] = row[1]
                res['msgid'] = row[2]
                res['date'] = row[3].strftime('%Y-%m-%d %H:%M:%S') if row[3] else ''
                res['msg'] = row[4]
                res['from'] = ''
                res['label'] = row[5]
                res['icon'] = row[6]
                s_result.append(res)
        finally:
            await pgconn.close()
    return s_result
async def history_file(self, user_id, term, offset=0, limit=5, time_range=None):
    """Search file messages (msgType="5") in both single and group chats.

    :param user_id: full JID ("name@domain") of the searching user.
    :param term: file-name search text; leading REGEX_TAG => regex,
        else ILIKE substring.
    :param offset: SQL OFFSET.
    :param limit: SQL LIMIT.
    :param time_range: optional [start, end]; either bound may be falsy.
    :return: list of result dicts ([] on bad input).
    """
    s_result = list()
    if '@' not in user_id:
        sql_logger.error('SEARCH USER WITHOUT DOMAIN')
        return []
    user_s_name = user_id.split('@')[0]
    user_domain = user_id.split('@')[1]
    if term.startswith(REGEX_TAG):
        search_model = '~'
        term = term[1:]
    else:
        search_model = 'ilike'
        term = '%{}%'.format(term)
    # The two UNION arms (group-chat files, single-chat files) are wrapped in
    # an outer query so the file-name and time filters apply to BOTH of them.
    # Previously the WHERE only filtered the second arm, and the time filter
    # referenced create_time, which the sub-selects alias away as "date".
    sql = """SELECT file, from_, to_, date, msgid, label, icon, msg
    FROM (
        SELECT file, from_, pfv.muc_name as to_, date, msgid, pfv.show_name as label, pfv.muc_pic as icon, msg
        FROM (
            SELECT json(unnest(xpath('//body[@msgType="5"]/text()', packet::xml))::text) AS file, '' AS from_, muc_room_name AS to_, create_time AS date, msg_id AS msgid,packet as msg
            FROM muc_room_history
            WHERE muc_room_name IN (
                SELECT muc_name
                FROM user_register_mucs
                WHERE username = $4
                AND registed_flag = 1
                AND host= $5
            )
        ) pfc left join muc_vcard_info pfv
        on pfc.to_= split_part(pfv.muc_name,'@',1)
        UNION ALL
        SELECT file, from_, to_, date, msgid, pfb.user_name as label, pfv.url as icon, msg
        FROM (
            SELECT json(unnest(xpath('/message/body[@msgType="5"]/text()', m_body::xml))::text) AS file, m_from || '@' || from_host as from_
            , m_to || '@' || to_host as to_, create_time AS date
            , msg_id AS msgid, m_body as msg
            FROM msg_history
            WHERE (m_from = $4 AND from_host = $5 )
            OR (m_to = $4 AND to_host = $5 )
        ) pfx left join vcard_version pfv
        on split_part(pfx.from_,'@',1) = pfv.username
        left join host_users pfb
        on pfv.username = pfb.user_id and pfv.host = ANY(SELECT host from host_info WHERE id = pfb.host_id)
    ) unioned
    WHERE file ->> 'FileName' {search_model} $1 {time_limit_start} {time_limit_end}
    ORDER BY date desc
    OFFSET $3
    LIMIT $2
    """
    injection = [term, limit, offset, user_s_name, user_domain]
    time_limit_start = ''
    time_limit_end = ''
    if time_range and isinstance(time_range, list):
        # $1-$5 are taken, so the time bounds are $6/$7 (the old $5
        # collided with the user-domain parameter); the filter column is
        # the aliased "date".
        if time_range[0] and time_range[1]:
            time_limit_start = "AND date > $6"
            time_limit_end = "AND date < $7"
            injection += time_range
        elif time_range[0]:
            time_limit_start = "AND date > $6"
            injection += [time_range[0]]
        elif time_range[1]:
            time_limit_end = "AND date < $6"
            injection += [time_range[1]]
    sql = sql.format(time_limit_start=time_limit_start, time_limit_end=time_limit_end,
                     search_model=search_model)
    pgconn = await asyncpg.connect(self.conn_str)
    try:
        stmt = await pgconn.prepare(sql)
        for (file, from_, to_, date, msgid, label, icon, msg) in await stmt.fetch(*injection):
            row = ['' if x is None else x for x in (file, from_, to_, date, msgid, label, icon, msg)]
            res = dict()
            res['fileinfo'] = json.loads(row[0])
            res['from'] = row[1]
            res['to'] = row[2]
            res['date'] = row[3].strftime('%Y-%m-%d %H:%M:%S') if row[3] else ''
            res['msgid'] = row[4]
            # Prefer the resolved display name; fall back to the bare JID.
            res['source'] = row[5] if row[5] else row[1]
            res['icon'] = row[6]
            res['msg'] = row[7]
            res['mtype'] = 5
            s_result.append(res)
    finally:
        await pgconn.close()
    return s_result
async def history_single_file(self, user_id, term, offset=0, limit=5):
    """Search file messages in single chats only, newest first.

    :param user_id: bare user name (matched against m_from / m_to).
    :param term: regex applied to the file name.
    :param offset: SQL OFFSET.
    :param limit: SQL LIMIT.
    :return: list of result dicts.
    """
    s_result = list()
    # ORDER BY must use the alias the sub-select actually produces ("epo");
    # the old "pfx.time" column did not exist, so every call failed.
    sql = """SELECT * from ( SELECT unnest(xpath('//body[@msgType="5"]/text()',m_body::xml))::text::json as file, m_from, m_to,create_time as epo, msg_id as msgid from msg_history where m_from = $1 or m_to = $1) as pfx where pfx.file->>'FileName' ~ $2 order by pfx.epo desc offset $3 limit $4"""
    pgconn = await asyncpg.connect(self.conn_str)
    try:
        stmt = await pgconn.prepare(sql)
        injection = [user_id, term, offset, limit]
        for (_file, _from, _to, _time, _msgid) in await stmt.fetch(*injection):
            row = ['' if x is None else x for x in (_file, _from, _to, _time, _msgid)]
            res = dict()
            res['file'] = row[0]
            res['from'] = row[1]
            res['to'] = row[2]
            res['time'] = row[3]
            res['msgid'] = row[4]
            # NOTE(review): module-level `domain` may be a list on
            # multi-domain deployments -- confirm callers expect that.
            res['domain'] = domain
            res['chattype'] = 'chat'  # chat vs groupchat
            s_result.append(res)
    finally:
        await pgconn.close()
    sql_logger.debug('FILE SINGLE RESULT {}'.format(s_result))
    return s_result
async def history_muc_file(self, user_id, term, muc_list, offset=0, limit=5):
    """Search file messages in the given MUC rooms, newest first.

    :param user_id: searching user (not used by the query itself).
    :param term: regex applied to the file name.
    :param muc_list: list/array of bare room names to search.
    :param offset: SQL OFFSET.
    :param limit: SQL LIMIT.
    :return: list of result dicts.
    """
    s_result = list()
    # "IN $1" is not valid for a bound array parameter -> "= ANY($1)".
    # msg_id is also selected so the row matches the 5-tuple unpacked below
    # (the old query returned 4 columns and unpacking raised ValueError).
    sql = """SELECT * from ( SELECT unnest(xpath('//body[@msgType="5"]/text()',packet::xml))::text::json as file, nick, muc_room_name, create_time as epo, msg_id as msgid from muc_room_history where muc_room_name = ANY($1) ) as pfx where pfx.file->>'FileName' ~ $2 order by pfx.epo desc offset $3 limit $4"""
    pgconn = await asyncpg.connect(self.conn_str)
    try:
        stmt = await pgconn.prepare(sql)
        injection = [muc_list, term, offset, limit]
        for (_file, _from, _to, _time, _msgid) in await stmt.fetch(*injection):
            row = ['' if x is None else x for x in (_file, _from, _to, _time, _msgid)]
            res = dict()
            res['file'] = row[0]
            res['from'] = row[1]   # sender nick inside the room
            res['to'] = row[2]     # room name
            res['time'] = row[3]
            res['msgid'] = row[4]
            # NOTE(review): module-level `domain` may be a list on
            # multi-domain deployments -- confirm callers expect that.
            res['domain'] = domain
            res['chattype'] = 'groupchat'  # chat vs groupchat
            s_result.append(res)
    finally:
        await pgconn.close()
    sql_logger.debug('FILE MUC RESULT {}'.format(s_result))
    return s_result
async def get_mucs_info(self, muc):
    """Fetch the display name and avatar for a single MUC room.

    :param muc: room name, with or without the conference domain part.
    :return: dict with 'show_name' and 'muc_pic' (empty when unknown).
    """
    info = {}
    if '@' not in muc:
        # assumes conference_str is a plain string (single-domain
        # deployment) -- TODO confirm for multi-domain installs.
        muc = muc + '@' + conference_str
    query = """select show_name,muc_pic from muc_vcard_info where muc_name = $1"""
    conn = await asyncpg.connect(self.conn_str)
    prepared = await conn.prepare(query)
    for muc_name, pic in await prepared.fetch(muc):
        info['show_name'] = '' if muc_name is None else muc_name
        info['muc_pic'] = '' if pic is None else pic
    # TODO SORT!!!!
    await conn.close()
    return info
async def get_person_info(self, person):
    """Fetch the display name and avatar URL for a single user.

    :param person: user JID or bare user id; any domain part is stripped.
    :return: dict with 'show_name' and 'url' (empty when unknown).
    """
    info = {}
    if '@' in person:
        person = person.split('@')[0]
    query = """ select a.user_name,b.url from host_users a join vcard_version b on a.user_id = $1 and a.user_id = b.username;"""
    conn = await asyncpg.connect(self.conn_str)
    prepared = await conn.prepare(query)
    for name, pic in await prepared.fetch(person):
        info['show_name'] = '' if name is None else name
        info['url'] = '' if pic is None else pic
    # TODO SORT!!!!
    await conn.close()
    return info
@staticmethod
def sort_by_habit(data, habit, name_key, search_key=''):
    """Reorder *data* so entries present in the user's habit list come first.

    Habit entries are applied from least to most recent, so the most recent
    habit entry ends up at index 0.

    :param data: list of result dicts.
    :param habit: list/set of values (most recent last) to promote.
    :param name_key: key of each dict compared against the habit values.
    :param search_key: unused; kept for interface compatibility.
    :return: the reordered list (data is reordered in place).
    """
    if not data:
        return []
    if not name_key:
        sql_logger.warning("NO NAME_KEY FOUND :{}".format(data))
    if not isinstance(data, list):
        sql_logger.error("DATA NOT A LIST :{data}".format(data=data))
        return data
    if not isinstance(habit, (set, list)):
        sql_logger.error("HABIT NOT A LIST :{habit}".format(habit=habit))
        return data
    for _h in habit[::-1]:
        # Recompute the name list on every pass: earlier moves reorder
        # `data`, so an index list built once before the loop goes stale
        # (the old code also hard-coded 'uri' instead of name_key).
        names = [x[name_key] for x in data]
        if _h in names and names.index(_h) != 0:
            data.insert(0, data.pop(names.index(_h)))
    return data
@staticmethod
def handle_sql_result(data):
    """Collapse the two directions of each conversation into one entry.

    The same conversation can appear as both "a_b" and "b_a"; keep the row
    with the larger id and accumulate the message counts, then return the
    merged entries ordered by id (ascending).

    :param data: list of dicts with 'conversation', 'id' and 'count' keys.
    :return: merged, id-sorted list of dicts.
    """
    merged = {}
    for hit in data:
        parts = hit['conversation'].split('_')
        # Direction-independent key: sort the two endpoints.
        key = '_'.join(sorted([parts[0], parts[1]]))
        existing = merged.get(key)
        if existing is None:
            merged[key] = hit
        elif existing['id'] > hit['id']:
            existing['count'] += hit['count']
        else:
            hit['count'] += existing['count']
            merged[key] = hit
    return sorted(merged.values(), key=lambda entry: entry.get('id'))
@staticmethod
def make_common_sql(keys, origin=True, common=True, habit_tag=False):
    """Assemble the group-search SQL via two-stage str.format templating.

    origin  -- search by group name / group id.
    common  -- search by group members.
    habit_tag -- search group members from the cache, i.e. within a specific
                 set of groups, so the SQL is slightly different.
    :param keys: member search keys; one pair of SELECT fragments is
        generated per key, bound to the positional parameters $1..$len(keys).
    :param keys: see above.
    :param origin: include the group-name/id branch.
    :param common: include the group-member branch.
    :param habit_tag: restrict the member search to the cached group set.
    :return: SQL string still containing a literal ``{search_model}``
        placeholder for the caller to fill in; None when common is set but
        keys is empty; "" when no branch matches.
    """
    sql = ""
    if common:
        if not keys:
            return
        case_pattern = []
        union_pattern = []
        key_len = len(keys)
        # Build one sub-select per search key. The inner .format() fills the
        # key index but deliberately re-emits {searcher_index},
        # {search_model} and {searcher_domain_index} as literal braces so the
        # later template passes can number/fill them after all keys.
        for i, k in enumerate(keys):
            case_pattern.append(
                """SELECT ${i}, user_id
                FROM host_users
                WHERE hire_flag = 1 AND user_id != ${searcher_index} AND ( user_id ilike ${i} OR user_name {search_model} ${i} OR pinyin ilike ${i} ) AND host_id = ANY(SELECT id FROM host_info WHERE host = ${searcher_domain_index})""".format(
                    i=i + 1, searcher_index='{searcher_index}', search_model='{search_model}',
                    searcher_domain_index='{searcher_domain_index}'))
            union_pattern.append("""SELECT a.muc_name|| '@' || a.domain as muc_name, string_agg(a.username||'@'||a.host, '|') as hit, max(a.created_at) as time
                FROM user_register_mucs a JOIN tmp2 b ON a.muc_name = b.muc_name
                WHERE username IN (select user_id from tmp where key = ${i}) and a.registed_flag != 0 AND a.domain = 'conference.' || ${searcher_domain_index}
                group by a.muc_name || '@' || a.domain""".format(i=i + 1,
                                                                 searcher_domain_index='{searcher_domain_index}'))
    # Placeholder layout (translated legacy description):
    # keys occupy $1 ... $len(keys)
    # then searcher_index at len + 1
    # then the conference string at len + 2
    # then offset at len + 3
    # limit at len + 4
    # the exclusion list at len + 5
    # the name-search key at len + 6
    # domain at len + 7
    # NOTE(review): the active .format() calls below use a different
    # numbering (offset=len+2, limit=len+3, exclude=len+4, ...) -- verify
    # against the callers before trusting the list above.
    if habit_tag and common:
        # Member search restricted to the cached group set (= ANY(exclude)).
        sql = """
        WITH tmp (key, user_id) AS (
        {keys_pattern}
        ),
        tmp2 (muc_name, domain, created_at) AS (
        SELECT split_part(muc_name || '@' || domain,'@',1) ,split_part(muc_name || '@' || domain,'@',2), max(created_at) as created_at
        FROM user_register_mucs
        WHERE username = ${searcher_index} AND host = ${searcher_domain_index}
        AND registed_flag != 0 AND muc_name = ANY(${exclude}) AND domain = 'conference.' || ${searcher_domain_index}
        GROUP BY muc_name || '@' || domain
        )
        SELECT
        aa.mucname,
        split_part(bb.muc_name, '@', 2) AS domain,
        bb.show_name,
        bb.muc_title,
        bb.muc_pic,
        aa.tag
        FROM (
        SELECT mucname, tag from (
        SELECT muc_name AS mucname, array_agg(hit) AS tag
        FROM (
        {select_pattern}
        ) foo
        GROUP BY muc_name
        HAVING COUNT(muc_name) = {length}
        ) boo
        ) aa
        JOIN muc_vcard_info bb
        ON (aa.mucname) = bb.muc_name
        offset ${offset} limit ${limit}""".format(
            # keys_pattern=' union all '.join(case_pattern),
            # select_pattern=' union all '.join(union_pattern),
            # length=key_len, conference_str_index=key_len + 2, offset=key_len + 3,
            # limit=key_len + 4, exclude=key_len + 5, searcher_index=key_len + 1, searcher_domain_index=key_len + 6,search_model='{search_model}')
            keys_pattern=' union all '.join(case_pattern),
            select_pattern=' union all '.join(union_pattern),
            searcher_index='{searcher_index}', searcher_domain_index='{searcher_domain_index}',
            length=key_len, offset=key_len + 2,
            limit=key_len + 3, exclude=key_len + 4).format(searcher_index=key_len + 1,
                                                           searcher_domain_index=key_len + 5,
                                                           search_model='{search_model}')
        return sql
    if origin and common:
        # fill in the remaining format slots
        sql = """
        WITH tmp (key, user_id) AS (
        {keys_pattern}
        ),
        tmp2 (muc_name, domain, created_at) AS (
        SELECT split_part(muc_name || '@' || domain,'@',1) ,split_part(muc_name || '@' || domain,'@',2), max(created_at) as created_at
        FROM user_register_mucs
        WHERE username = ${searcher_index} AND host = ${searcher_domain_index}
        AND registed_flag != 0 AND muc_name <> ALL (${exclude}) AND domain = 'conference.' || ${searcher_domain_index}
        GROUP BY muc_name || '@' || domain
        )
        SELECT
        aa.mucname,
        split_part(bb.muc_name, '@', 2) AS domain,
        bb.show_name,
        bb.muc_title,
        bb.muc_pic,
        aa.tag
        FROM (
        SELECT mucname, array_agg(tag) AS tag, MAX(time) as time
        FROM(
        SELECT mucname, tag, time from (
        SELECT muc_name AS mucname, array_agg(hit) AS tag, max(time) as time
        FROM (
        {select_pattern}
        ) foo
        GROUP BY muc_name
        HAVING COUNT(muc_name) = {length}
        ) boo
        union all
        select a.muc_name|| '@' || a.domain as muccname, array[''] as hit, a.created_at as time
        from tmp2 a join muc_vcard_info b on concat(a.muc_name, '@', a.domain) = b.muc_name
        where (b.show_name {search_model} ${like_key} or b.muc_name ilike ${like_key})
        ) poo
        GROUP BY mucname
        ) aa
        JOIN muc_vcard_info bb
        ON aa.mucname = bb.muc_name
        ORDER BY time DESC
        OFFSET ${offset} LIMIT ${limit}""".format(
            # keys_pattern=' union all '.join(case_pattern),
            # select_pattern=' union all '.join(union_pattern),
            # searcher_index=key_len + 1, length=key_len, conference_str_index=key_len + 2, offset=key_len + 3,
            # limit=key_len + 4, exclude=key_len + 5, like_key=key_len + 6, searcher_domain_index=key_len + 7,
            # search_model='{search_model}')
            keys_pattern=' union all '.join(case_pattern),
            select_pattern=' union all '.join(union_pattern),
            searcher_index='{searcher_index}', length=key_len, offset=key_len + 2,
            limit=key_len + 3, exclude=key_len + 4, like_key='{like_key}',
            searcher_domain_index='{searcher_domain_index}', search_model='{search_model}').format(
            searcher_index=key_len + 1,
            like_key=key_len + 5,
            search_model='{search_model}', searcher_domain_index=key_len + 6)
    elif common and not origin:
        sql = """
        WITH tmp (key, user_id) AS (
        {keys_pattern}
        ),
        tmp2 (muc_name, domain, created_at) AS (
        SELECT split_part(muc_name || '@' || domain,'@',1) ,split_part(muc_name || '@' || domain,'@',2), max(created_at) as created_at
        FROM user_register_mucs
        WHERE username = ${searcher_index} AND host = ${searcher_domain_index}
        AND registed_flag != 0 AND muc_name <> ALL (${exclude}) AND domain = 'conference.' || ${searcher_domain_index}
        GROUP BY muc_name || '@' || domain
        )
        SELECT
        aa.mucname,
        split_part(bb.muc_name, '@', 2) AS domain,
        bb.show_name,
        bb.muc_title,
        bb.muc_pic,
        aa.tag
        FROM (
        SELECT mucname, tag, time from (
        SELECT muc_name AS mucname, array_agg(hit) AS tag, max(time) as time
        FROM (
        {select_pattern}
        ) foo
        GROUP BY muc_name
        HAVING COUNT(muc_name) = {length}
        ) boo
        ) aa
        JOIN muc_vcard_info bb
        ON aa.mucname = bb.muc_name
        ORDER BY time DESC
        offset ${offset} limit ${limit}""".format(
            # keys_pattern=' union all '.join(case_pattern),
            # select_pattern=' union all '.join(union_pattern),
            # length=key_len, conference_str_index=key_len + 2, offset=key_len + 3,
            # limit=key_len + 4, exclude=key_len + 5, searcher_index=key_len + 1, searcher_domain_index=key_len + 6,
            # search_model='{search_model}')
            keys_pattern=' union all '.join(case_pattern),
            select_pattern=' union all '.join(union_pattern),
            searcher_index='{searcher_index}', searcher_domain_index='{searcher_domain_index}',
            length=key_len, offset=key_len + 2,
            limit=key_len + 3, exclude=key_len + 4).format(searcher_index=key_len + 1,
                                                           searcher_domain_index=key_len + 5,
                                                           search_model='{search_model}')
    elif not common and origin:
        # Name/id-only search: fixed positional parameters, no templating
        # beyond the caller's {search_model}.
        sql = """SELECT
        b.muc_name as mucname, split_part(b.muc_name,'@',2) as domain, b.show_name, b.muc_title, b.muc_pic, array['']
        FROM
        user_register_mucs as a left join muc_vcard_info as b
        ON
        concat(a.muc_name, '@', a.domain) = b.muc_name
        WHERE
        a.registed_flag != 0 and a.username = $1 and (b.show_name {search_model} $2 or b.muc_name ~ $2) and b.muc_name <> ALL ($5) AND a.host = $6
        order by b.update_time desc limit $3 offset $4"""
    return sql
# One-time module initialisation: discover the database version and the
# XMPP domain(s), then decide whether asyncpg can be used at all.
if DB_VERSION is None:
    domain = ''
    __user_lib = UserLib()
    DB_VERSION = __user_lib.get_db_version()
    domain = __user_lib.get_domain()
    if len(domain) == 1:
        # Single-domain deployment: keep plain strings.
        domain = domain[0]
        conference_str = 'conference.' + domain
    else:
        # Multi-domain deployment: map each domain to its conference host.
        conference_str = {}
        for d in domain:
            conference_str[d] = 'conference.' + d
    __user_lib.conn.close()
    sql_logger.info('PGSQL VERSION : {}'.format(DB_VERSION))
# Decide whether the python/postgres combination can run asyncpg.
if if_async is None:
    if_async = False
    if PY_VERSION and isinstance(PY_VERSION, str):
        if not PY_VERSION.startswith('3') or int(PY_VERSION.split('.')[1]) < 5:
            sql_logger.warning("UNSATISFIED PYTHON VERSION {}".format(PY_VERSION))
        else:
            if DB_VERSION and isinstance(DB_VERSION, str):
                if int(DB_VERSION.split('.')[0]) < 9 or int(DB_VERSION.split('.')[0]) > 10:
                    sql_logger.warning("UNSATISFIED PSQL VERSION {}".format(DB_VERSION))
                else:
                    if_async = True
# Identity check (is, not ==) per PEP 8; defensive guard kept from the
# original even though the branch above always leaves if_async non-None.
if if_async is None:
    sql_logger.error("asyncpg module work in wrong environment")
    raise ConnectionError("asyncpg module work in wrong environment")
sql_logger.info('USE ASYNC : {}'.format(if_async))
| 50.722317
| 763
| 0.50172
| 18,018
| 147,957
| 3.875791
| 0.02603
| 0.019503
| 0.010826
| 0.011928
| 0.921041
| 0.90466
| 0.889266
| 0.874388
| 0.862617
| 0.84582
| 0
| 0.010417
| 0.384943
| 147,957
| 2,916
| 764
| 50.739712
| 0.756972
| 0.047602
| 0
| 0.842186
| 0
| 0.040801
| 0.32765
| 0.018672
| 0
| 0
| 0
| 0.001372
| 0
| 1
| 0.011162
| false
| 0.00154
| 0.007698
| 0.000385
| 0.048884
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86fe33cd8857890f8e4a788280e9599b12ece0f5
| 11,428
|
py
|
Python
|
tests/test_us/tests.py
|
MehdioKhan/django-localflavor
|
7cb223bf801ebc7659cc314a8a870e47e5004488
|
[
"BSD-3-Clause"
] | 1
|
2018-11-28T22:08:17.000Z
|
2018-11-28T22:08:17.000Z
|
tests/test_us/tests.py
|
DalavanCloud/django-localflavor
|
b78df3bbfa5e07e3f6b78a09d43c45eb39fa1196
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_us/tests.py
|
DalavanCloud/django-localflavor
|
b78df3bbfa5e07e3f6b78a09d43c45eb39fa1196
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import unicode_literals
from django.test import TestCase
from localflavor.us import forms
from .forms import USPlaceForm
class USLocalFlavorTests(TestCase):
def setUp(self):
    """Create the form instance shared by the display/rendering tests."""
    form_data = {
        'state': 'GA',
        'state_req': 'NC',
        'postal_code': 'GA',
        'name': 'impossible',
        'zip_code': '12345',
    }
    self.form = USPlaceForm(form_data)
def test_get_display_methods(self):
    """The generated get_*_display() helpers must resolve state codes."""
    saved_place = self.form.save()
    self.assertEqual(saved_place.get_state_display(), 'Georgia')
    self.assertEqual(saved_place.get_state_req_display(), 'North Carolina')
def test_required(self):
    """A missing required USStateField must yield a validation error."""
    partial_form = USPlaceForm({'state': 'GA', 'name': 'Place in GA'})
    self.assertFalse(partial_form.is_valid())
    self.assertEqual(partial_form.errors['state_req'], ['This field is required.'])
def test_errors(self):
    """Invalid values for every US field are rejected with the documented
    error messages."""
    bad_form = USPlaceForm({
        'state': 'invalid',
        'state_req': 'invalid',
        'postal_code': 'invalid',
        'name': 'name',
        'ssn': 'invalid',
        'zip_code': 'invalid',
    })
    self.assertFalse(bad_form.is_valid())
    choice_messages = ['Select a valid choice. invalid is not one of the available choices.']
    # The three choice-based fields all share the same error text.
    for field_name in ('state', 'state_req', 'postal_code'):
        self.assertEqual(bad_form.errors[field_name], choice_messages)
    self.assertEqual(bad_form.errors['ssn'], ['Enter a valid U.S. Social Security number in XXX-XX-XXXX format.'])
    self.assertEqual(bad_form.errors['zip_code'], ['Enter a zip code in the format XXXXX or XXXXX-XXXX.'])
def test_field_blank_option(self):
    """Test that the empty option is there."""
    # Expected rendering of the USStateField select: the blank "---------"
    # option first, then every state/territory, with 'GA' (set in setUp)
    # marked selected. Compared with assertHTMLEqual, so attribute order
    # and whitespace are normalised.
    state_select_html = """\
<select name="state" id="id_state">
<option value="">---------</option>
<option value="AL">Alabama</option>
<option value="AK">Alaska</option>
<option value="AS">American Samoa</option>
<option value="AZ">Arizona</option>
<option value="AR">Arkansas</option>
<option value="AA">Armed Forces Americas</option>
<option value="AE">Armed Forces Europe</option>
<option value="AP">Armed Forces Pacific</option>
<option value="CA">California</option>
<option value="CO">Colorado</option>
<option value="CT">Connecticut</option>
<option value="DE">Delaware</option>
<option value="DC">District of Columbia</option>
<option value="FL">Florida</option>
<option value="GA" selected="selected">Georgia</option>
<option value="GU">Guam</option>
<option value="HI">Hawaii</option>
<option value="ID">Idaho</option>
<option value="IL">Illinois</option>
<option value="IN">Indiana</option>
<option value="IA">Iowa</option>
<option value="KS">Kansas</option>
<option value="KY">Kentucky</option>
<option value="LA">Louisiana</option>
<option value="ME">Maine</option>
<option value="MD">Maryland</option>
<option value="MA">Massachusetts</option>
<option value="MI">Michigan</option>
<option value="MN">Minnesota</option>
<option value="MS">Mississippi</option>
<option value="MO">Missouri</option>
<option value="MT">Montana</option>
<option value="NE">Nebraska</option>
<option value="NV">Nevada</option>
<option value="NH">New Hampshire</option>
<option value="NJ">New Jersey</option>
<option value="NM">New Mexico</option>
<option value="NY">New York</option>
<option value="NC">North Carolina</option>
<option value="ND">North Dakota</option>
<option value="MP">Northern Mariana Islands</option>
<option value="OH">Ohio</option>
<option value="OK">Oklahoma</option>
<option value="OR">Oregon</option>
<option value="PA">Pennsylvania</option>
<option value="PR">Puerto Rico</option>
<option value="RI">Rhode Island</option>
<option value="SC">South Carolina</option>
<option value="SD">South Dakota</option>
<option value="TN">Tennessee</option>
<option value="TX">Texas</option>
<option value="UT">Utah</option>
<option value="VT">Vermont</option>
<option value="VI">Virgin Islands</option>
<option value="VA">Virginia</option>
<option value="WA">Washington</option>
<option value="WV">West Virginia</option>
<option value="WI">Wisconsin</option>
<option value="WY">Wyoming</option>
</select>"""
    self.assertHTMLEqual(str(self.form['state']), state_select_html)
def test_full_postal_code_list(self):
    """Test that the full USPS code field is really the full list."""
    # Expected rendering of the bound ``postal_code`` field.  Unlike the
    # plain state list, the USPS list additionally contains FM, MH and PW
    # (the Compact of Free Association states).  "GA" carries
    # selected="selected" — presumably the form's initial data for this
    # field; confirm against the test's setUp.
    usps_select_html = """\
<select name="postal_code" id="id_postal_code">
<option value="">---------</option>
<option value="AL">Alabama</option>
<option value="AK">Alaska</option>
<option value="AS">American Samoa</option>
<option value="AZ">Arizona</option>
<option value="AR">Arkansas</option>
<option value="AA">Armed Forces Americas</option>
<option value="AE">Armed Forces Europe</option>
<option value="AP">Armed Forces Pacific</option>
<option value="CA">California</option>
<option value="CO">Colorado</option>
<option value="CT">Connecticut</option>
<option value="DE">Delaware</option>
<option value="DC">District of Columbia</option>
<option value="FM">Federated States of Micronesia</option>
<option value="FL">Florida</option>
<option value="GA" selected="selected">Georgia</option>
<option value="GU">Guam</option>
<option value="HI">Hawaii</option>
<option value="ID">Idaho</option>
<option value="IL">Illinois</option>
<option value="IN">Indiana</option>
<option value="IA">Iowa</option>
<option value="KS">Kansas</option>
<option value="KY">Kentucky</option>
<option value="LA">Louisiana</option>
<option value="ME">Maine</option>
<option value="MH">Marshall Islands</option>
<option value="MD">Maryland</option>
<option value="MA">Massachusetts</option>
<option value="MI">Michigan</option>
<option value="MN">Minnesota</option>
<option value="MS">Mississippi</option>
<option value="MO">Missouri</option>
<option value="MT">Montana</option>
<option value="NE">Nebraska</option>
<option value="NV">Nevada</option>
<option value="NH">New Hampshire</option>
<option value="NJ">New Jersey</option>
<option value="NM">New Mexico</option>
<option value="NY">New York</option>
<option value="NC">North Carolina</option>
<option value="ND">North Dakota</option>
<option value="MP">Northern Mariana Islands</option>
<option value="OH">Ohio</option>
<option value="OK">Oklahoma</option>
<option value="OR">Oregon</option>
<option value="PW">Palau</option>
<option value="PA">Pennsylvania</option>
<option value="PR">Puerto Rico</option>
<option value="RI">Rhode Island</option>
<option value="SC">South Carolina</option>
<option value="SD">South Dakota</option>
<option value="TN">Tennessee</option>
<option value="TX">Texas</option>
<option value="UT">Utah</option>
<option value="VT">Vermont</option>
<option value="VI">Virgin Islands</option>
<option value="VA">Virginia</option>
<option value="WA">Washington</option>
<option value="WV">West Virginia</option>
<option value="WI">Wisconsin</option>
<option value="WY">Wyoming</option>
</select>"""
    # assertHTMLEqual compares parsed HTML, so insignificant whitespace
    # differences between the literal and the widget output are ignored.
    self.assertHTMLEqual(str(self.form['postal_code']), usps_select_html)
def test_USStateSelect(self):
    """Rendering USStateSelect with value 'IL' marks Illinois as selected."""
    f = forms.USStateSelect()
    # Full expected option list: states, DC, territories and the armed
    # forces codes, but NOT FM/MH/PW (those are USPS-only codes).
    out = '''<select name="state">
<option value="AL">Alabama</option>
<option value="AK">Alaska</option>
<option value="AS">American Samoa</option>
<option value="AZ">Arizona</option>
<option value="AR">Arkansas</option>
<option value="AA">Armed Forces Americas</option>
<option value="AE">Armed Forces Europe</option>
<option value="AP">Armed Forces Pacific</option>
<option value="CA">California</option>
<option value="CO">Colorado</option>
<option value="CT">Connecticut</option>
<option value="DE">Delaware</option>
<option value="DC">District of Columbia</option>
<option value="FL">Florida</option>
<option value="GA">Georgia</option>
<option value="GU">Guam</option>
<option value="HI">Hawaii</option>
<option value="ID">Idaho</option>
<option value="IL" selected="selected">Illinois</option>
<option value="IN">Indiana</option>
<option value="IA">Iowa</option>
<option value="KS">Kansas</option>
<option value="KY">Kentucky</option>
<option value="LA">Louisiana</option>
<option value="ME">Maine</option>
<option value="MD">Maryland</option>
<option value="MA">Massachusetts</option>
<option value="MI">Michigan</option>
<option value="MN">Minnesota</option>
<option value="MS">Mississippi</option>
<option value="MO">Missouri</option>
<option value="MT">Montana</option>
<option value="NE">Nebraska</option>
<option value="NV">Nevada</option>
<option value="NH">New Hampshire</option>
<option value="NJ">New Jersey</option>
<option value="NM">New Mexico</option>
<option value="NY">New York</option>
<option value="NC">North Carolina</option>
<option value="ND">North Dakota</option>
<option value="MP">Northern Mariana Islands</option>
<option value="OH">Ohio</option>
<option value="OK">Oklahoma</option>
<option value="OR">Oregon</option>
<option value="PA">Pennsylvania</option>
<option value="PR">Puerto Rico</option>
<option value="RI">Rhode Island</option>
<option value="SC">South Carolina</option>
<option value="SD">South Dakota</option>
<option value="TN">Tennessee</option>
<option value="TX">Texas</option>
<option value="UT">Utah</option>
<option value="VT">Vermont</option>
<option value="VI">Virgin Islands</option>
<option value="VA">Virginia</option>
<option value="WA">Washington</option>
<option value="WV">West Virginia</option>
<option value="WI">Wisconsin</option>
<option value="WY">Wyoming</option>
</select>'''
    self.assertHTMLEqual(f.render('state', 'IL'), out)
def test_USZipCodeField(self):
    """USZipCodeField accepts 5- and 9-digit ZIPs, strips whitespace, and
    rejects malformed values with the standard format error."""
    format_error = ['Enter a zip code in the format XXXXX or XXXXX-XXXX.']
    accepted = {
        '60606': '60606',
        60606: '60606',          # non-string input is coerced
        '04000': '04000',        # leading zero preserved
        ' 04000 ': '04000',      # surrounding whitespace stripped
        '60606-1234': '60606-1234',
    }
    rejected = {bad: format_error
                for bad in ('4000', '6060-1234', '60606-')}
    self.assertFieldOutput(forms.USZipCodeField, accepted, rejected)
def test_USZipCodeField_formfield(self):
    """The bound ZIP field renders as a text input capped at 10 chars."""
    rendered = str(self.form['zip_code'])
    self.assertHTMLEqual(
        rendered,
        '<input id="id_zip_code" maxlength="10" name="zip_code" type="text" value="12345" />')
def test_USStateField(self):
    """USStateField normalizes abbreviations and full names (any case,
    surrounding whitespace allowed) to the two-letter code."""
    invalid_message = ['Enter a U.S. state or territory.']
    accepted = {raw: 'IL' for raw in ('il', 'IL', 'illinois', ' illinois ')}
    rejected = {60606: invalid_message}
    self.assertFieldOutput(forms.USStateField, accepted, rejected)
def test_USSocialSecurityNumberField(self):
    """SSN field reformats digits-only input and rejects values the field
    treats as invalid (each listed both dashed and undashed)."""
    invalid_message = ['Enter a valid U.S. Social Security number in XXX-XX-XXXX format.']
    accepted = {
        '123-45-6789': '123-45-6789',
        '123456789': '123-45-6789',   # bare digits get dashes inserted
    }
    # NOTE(review): these specific numbers are rejected by the field;
    # presumably they fall in blocked/never-issued SSN ranges — confirm
    # against the field's validation rules.
    rejected_numbers = (
        '078-05-1120', '078051120',
        '900-12-3456', '900123456',
        '999-98-7652', '999987652',
    )
    rejected = {number: invalid_message for number in rejected_numbers}
    self.assertFieldOutput(forms.USSocialSecurityNumberField, accepted, rejected)
| 37.84106
| 115
| 0.676146
| 1,467
| 11,428
| 5.215406
| 0.174506
| 0.261665
| 0.397726
| 0.021958
| 0.774931
| 0.733107
| 0.712456
| 0.705137
| 0.705137
| 0.705137
| 0
| 0.018579
| 0.142807
| 11,428
| 301
| 116
| 37.966777
| 0.762454
| 0.025289
| 0
| 0.691756
| 0
| 0.003584
| 0.733105
| 0.314047
| 0
| 0
| 0
| 0
| 0.060932
| 1
| 0.039427
| false
| 0
| 0.014337
| 0
| 0.057348
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8102d916c7417017126a5788f222b57c087c9f8d
| 113
|
py
|
Python
|
stockzie/strategy/__init__.py
|
amozie/amozie
|
fb7c16ce537bc5567f9c87cfc22c564a4dffc4ef
|
[
"Apache-2.0"
] | null | null | null |
stockzie/strategy/__init__.py
|
amozie/amozie
|
fb7c16ce537bc5567f9c87cfc22c564a4dffc4ef
|
[
"Apache-2.0"
] | null | null | null |
stockzie/strategy/__init__.py
|
amozie/amozie
|
fb7c16ce537bc5567f9c87cfc22c564a4dffc4ef
|
[
"Apache-2.0"
] | null | null | null |
from stockzie.strategy.BaseStrategy import BaseStrategy
from stockzie.strategy.Ma520Strategy import Ma520Strategy
| 56.5
| 57
| 0.902655
| 12
| 113
| 8.5
| 0.5
| 0.235294
| 0.392157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056604
| 0.061947
| 113
| 2
| 57
| 56.5
| 0.90566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
811277c093dbcf243161bbbf3179b962a83db4b6
| 3,032
|
py
|
Python
|
tests/unit/actions/conftest.py
|
mascheck/hcloud-python
|
ffbfd32418c191676ce7f03a5e384bb123eb904a
|
[
"MIT"
] | 2
|
2020-12-11T18:09:44.000Z
|
2020-12-12T05:53:22.000Z
|
tests/unit/actions/conftest.py
|
mascheck/hcloud-python
|
ffbfd32418c191676ce7f03a5e384bb123eb904a
|
[
"MIT"
] | null | null | null |
tests/unit/actions/conftest.py
|
mascheck/hcloud-python
|
ffbfd32418c191676ce7f03a5e384bb123eb904a
|
[
"MIT"
] | 1
|
2019-06-19T17:53:10.000Z
|
2019-06-19T17:53:10.000Z
|
import pytest
@pytest.fixture()
def generic_action_list():
    """API payload with two finished actions (start_server / stop_server)."""
    def entry(action_id, command):
        # Shared scaffold; only the id and command differ between entries.
        return {
            "id": action_id,
            "command": command,
            "status": "success",
            "progress": 100,
            "started": "2016-01-30T23:55:00+00:00",
            "finished": "2016-01-30T23:56:00+00:00",
            "resources": [{"id": 42, "type": "server"}],
            "error": {"code": "action_failed", "message": "Action failed"},
        }

    return {"actions": [entry(1, "start_server"), entry(2, "stop_server")]}
@pytest.fixture()
def running_action():
    """Single-action API payload whose status is still "running"."""
    payload = {
        "id": 2,
        "command": "stop_server",
        "status": "running",
        "progress": 100,
        "started": "2016-01-30T23:55:00+00:00",
        "finished": "2016-01-30T23:56:00+00:00",
        "resources": [{"id": 42, "type": "server"}],
        "error": {"code": "action_failed", "message": "Action failed"},
    }
    return {"action": payload}
@pytest.fixture()
def successfully_action():
    """Single-action API payload whose status is "success"."""
    payload = {
        "id": 2,
        "command": "stop_server",
        "status": "success",
        "progress": 100,
        "started": "2016-01-30T23:55:00+00:00",
        "finished": "2016-01-30T23:56:00+00:00",
        "resources": [{"id": 42, "type": "server"}],
        "error": {"code": "action_failed", "message": "Action failed"},
    }
    return {"action": payload}
@pytest.fixture()
def failed_action():
    """Single-action API payload whose status is "error"."""
    payload = {
        "id": 2,
        "command": "stop_server",
        "status": "error",
        "progress": 100,
        "started": "2016-01-30T23:55:00+00:00",
        "finished": "2016-01-30T23:56:00+00:00",
        "resources": [{"id": 42, "type": "server"}],
        "error": {"code": "action_failed", "message": "Action failed"},
    }
    return {"action": payload}
| 25.694915
| 56
| 0.329485
| 216
| 3,032
| 4.555556
| 0.171296
| 0.081301
| 0.111789
| 0.111789
| 0.888211
| 0.888211
| 0.888211
| 0.888211
| 0.888211
| 0.780488
| 0
| 0.145329
| 0.523417
| 3,032
| 117
| 57
| 25.91453
| 0.53564
| 0
| 0
| 0.66055
| 0
| 0
| 0.28661
| 0.082454
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036697
| true
| 0
| 0.009174
| 0.036697
| 0.082569
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
814385fa659d6706f57b0a753a9bb46550f4b48c
| 12,599
|
py
|
Python
|
wBDA.py
|
chenjiehu/MGCN_ODA
|
311a71444d6e22d4049d7356c1aa31584857006c
|
[
"MIT"
] | null | null | null |
wBDA.py
|
chenjiehu/MGCN_ODA
|
311a71444d6e22d4049d7356c1aa31584857006c
|
[
"MIT"
] | null | null | null |
wBDA.py
|
chenjiehu/MGCN_ODA
|
311a71444d6e22d4049d7356c1aa31584857006c
|
[
"MIT"
] | null | null | null |
import torch
from sklearn import metrics
import sklearn.metrics
import numpy as np
from predict_A_distance import proxy_a_distance
from wMMD import mmd
def kernel(ker, X1, X2, gamma):
    """Compute a kernel matrix for X1 (and optionally X2).

    ker: falsy or 'primal' returns X1 unchanged; 'linear'/'rbf' delegate to
    sklearn's pairwise kernels (when X2 is None the kernel is taken over
    X1's transpose / X1 alone, matching the sklearn call signatures).
    Any other value yields None.
    """
    if not ker or ker == 'primal':
        return X1
    if ker == 'linear':
        if X2 is None:
            return sklearn.metrics.pairwise.linear_kernel(np.asarray(X1).T)
        return sklearn.metrics.pairwise.linear_kernel(
            np.asarray(X1), np.asarray(X2))
    if ker == 'rbf':
        if X2 is None:
            return sklearn.metrics.pairwise.rbf_kernel(
                np.asarray(X1), None, gamma)
        return sklearn.metrics.pairwise.rbf_kernel(
            np.asarray(X1), np.asarray(X2), gamma)
    return None
def guassian_kernel(source, target, kernel_mul=2.0, kernel_num=5, fix_sigma=None):
    """Multi-scale Gaussian kernel over the concatenated source+target batch.

    Returns the element-wise sum of `kernel_num` RBF kernels whose
    bandwidths form a geometric ladder (ratio `kernel_mul`) centred on a
    base bandwidth: `fix_sigma` if given, otherwise the mean pairwise
    squared distance over the joint batch.
    """
    n_samples = int(source.size()[0]) + int(target.size()[0])
    joint = torch.cat([source, target], dim=0)
    # Pairwise squared Euclidean distances via broadcasting:
    # (1, n, d) - (n, 1, d) -> (n, n, d), summed over features.
    pairwise_diff = joint.unsqueeze(0) - joint.unsqueeze(1)
    sq_dist = pairwise_diff.pow(2).sum(2)
    if fix_sigma:
        base_bw = fix_sigma
    else:
        # Mean of the off-diagonal squared distances (n^2 - n entries).
        base_bw = torch.sum(sq_dist.data) / (n_samples ** 2 - n_samples)
    base_bw = base_bw / (kernel_mul ** (kernel_num // 2))
    bandwidths = [base_bw * (kernel_mul ** i) for i in range(kernel_num)]
    return sum(torch.exp(-sq_dist / bw) for bw in bandwidths)
def wmmd(source, target, target_pred, kernel_mul=2.0, kernel_num=5, fix_sigma=None):
    """Weighted MMD between source and target batches.

    Target samples are weighted by ``target_pred`` (presumably per-sample
    class probabilities — confirm against callers), normalised by their
    sum Pi.  Returns 0 when ``target_pred`` and ``target`` lengths
    disagree.  NOTE: builds weight matrices with ``.cuda()``, so a CUDA
    device is required.
    """
    length_source = int(source.size()[0])
    kernels = guassian_kernel(source, target,
                              kernel_mul=kernel_mul,
                              kernel_num=kernel_num,
                              fix_sigma=fix_sigma)
    length_pred = int(target_pred.size()[0])
    length_target = int(target.size()[0])
    loss = 0
    if length_pred == length_target:
        Pi = torch.sum(target_pred)
        # Block weight matrix M = [[Mss, Mst], [Mst^T, Mtt]] over the joint
        # (source+target) kernel matrix; trace(K M) is the weighted MMD.
        target_pred_matrix = target_pred.unsqueeze(1)
        Mtt = torch.mm(target_pred_matrix, target_pred_matrix.t())/Pi**2
        target_pred_matrix = target_pred.repeat(length_source,1)
        Mst = (-1/(Pi*length_source))*torch.mul(torch.ones(length_source, length_target).cuda(),target_pred_matrix)
        Mss = (1/length_source**2)*torch.ones(length_source, length_source).cuda()
        M1 = torch.cat([Mss,Mst],dim = 1)
        M2 = torch.cat([Mst.t(),Mtt], dim = 1)
        M = torch.cat([M1,M2],dim = 0)
        loss = torch.trace(torch.mm(kernels.float(),M))
    return loss
# Weighted BDA (Balanced Distribution Adaptation) loss.
def BDA(source, source_label, target, target_pred_label):
    """BDA loss: per-class (conditional) MMD terms plus a marginal MMD term.

    Returns ``1/C * sum_c mmd(source_c, target_c) + mmd(source, target)``
    over classes with more than one sample on each side.  On a label-length
    mismatch an error is printed and 0 is returned.

    Bug fixes vs. the original: ``loss_all`` is now initialised, so the
    mismatch path returns 0 instead of raising NameError at ``return``;
    the garbled error message is corrected; the dead ``e`` indicator-vector
    computation (never read) is removed.
    """
    loss = 0
    loss_all = 0  # fix: previously unbound when the length check failed
    length_source = int(source.size()[0])
    length_target = int(target.size()[0])
    length_common = min(length_source, length_target)
    length_pred_label = int(target_pred_label.size()[0])
    length_source_label = int(source_label.size()[0])
    if (length_source == length_source_label) & (length_pred_label == length_target):
        # Conditional term: one MMD per class present on both sides.
        C = len(np.unique(source_label))
        for c in range(C):
            Ns = len(source_label[np.where(source_label == c)])
            Nt = len(target_pred_label[np.where(target_pred_label == c)])
            if (Nt > 1) & (Ns > 1):
                inds_source = [item for item in np.where(source_label == c)]
                inds_target = [item for item in np.where(target_pred_label == c)]
                source_c = np.squeeze(source[inds_source, :])
                target_c = np.squeeze(target[inds_target, :])
                loss = loss + mmd(source_c, target_c)
        # Marginal term over equal-length prefixes of the two batches.
        loss_B = mmd(source[:length_common], target[:length_common])
        loss_all = 1 / C * loss + loss_B
    else:
        print('input length error!')
    return loss_all
def wBDA(source, source_label, target, target_pred_label, target_pred_p):
    """Weighted BDA loss using wmmd with per-sample target weights.

    Combines per-class weighted-MMD terms and a marginal weighted-MMD term
    as ``(1 - mu) * loss / C + mu * loss_B`` with fixed ``mu = 0.1``.
    Returns 0 (after printing an error) on a label-length mismatch.

    Bug fixes vs. the original: ``loss_all`` is now initialised, so the
    mismatch path returns 0 instead of raising NameError; the garbled error
    message is corrected; dead locals (the unused ``e`` indicator vector,
    ``n`` and ``length_pred_p``) are removed.
    """
    loss = 0
    loss_all = 0  # fix: previously unbound when the length check failed
    mu = 0.1      # fixed trade-off between conditional and marginal terms
    length_source = int(source.size()[0])
    length_target = int(target.size()[0])
    length_pred_label = int(target_pred_label.size()[0])
    length_source_label = int(source_label.size()[0])
    if (length_source == length_source_label) & (length_pred_label == length_target):
        C = len(np.unique(source_label))
        for c in range(C):
            Ns = len(source_label[np.where(source_label == c)])
            Nt = len(target_pred_label[np.where(target_pred_label == c)])
            if (Nt > 1) & (Ns > 1):
                inds_source = [item for item in np.where(source_label == c)]
                inds_target = [item for item in np.where(target_pred_label == c)]
                source_c = np.squeeze(source[inds_source, :])
                target_c = np.squeeze(target[inds_target, :])
                target_pred_c = target_pred_p[inds_target]
                loss = loss + wmmd(source_c, target_c, target_pred_c)
        # Marginal distribution term.
        loss_B = wmmd(source, target, target_pred_p)
        loss_all = (1 - mu) * loss / C + mu * loss_B
    else:
        print('input length error!')
    return loss_all
def coral_wBDA(source, source_label ,target, target_pred_label, target_pred_p, coral_W):
    """Class-weighted BDA loss: each per-class wmmd term is scaled by
    ``coral_W[c]``; combined with the marginal term via fixed ``mu = 0.1``.

    Falls back to uniform class weights when ``coral_W`` does not match the
    class count.  Requires CUDA (via wmmd).
    """
    loss = 0
    loss_all = 0
    mu = 0.1
    length_source = int(source.size()[0])
    length_target = int(target.size()[0])
    length_pred_p = int(target_pred_p.size()[0])
    length_pred_label = int(target_pred_label.size()[0])
    length_source_label = int(source_label.size()[0])
    n = length_source + length_target
    if (length_source == length_source_label) & (length_pred_label == length_target):
        C = len(np.unique(source_label))
        if C != coral_W.size(0):
            # Weight vector does not match the class count: use uniform weights.
            coral_W = torch.ones(C)
        for c in range(C):
            # NOTE(review): ``e`` is built but never read afterwards —
            # apparently left over from a transfer-component-style variant.
            e = np.zeros((n, 1))
            Ns = len(source_label[np.where(source_label == c)])
            Nt = len(target_pred_label[np.where(target_pred_label == c)])
            if (Nt > 1) & (Ns > 1):
                tt = source_label == c
                e[np.where(tt == True)] = 1 / Ns
                ind = np.where(tt == True)
                inds_source = [item for item in ind]
                yy = target_pred_label == c
                ind = np.where(yy == True)
                inds_target = [item for item in ind]
                e[tuple(inds_target)] = -1 / Nt
                e[np.isinf(e)] = 0
                source_c = source[inds_source,:]
                source_c = np.squeeze(source_c)
                target_c = target[inds_target,:]
                target_c = np.squeeze(target_c)
                target_pred_c = target_pred_p[inds_target]
                loss_c = wmmd(source_c,target_c,target_pred_c)
                loss = loss + loss_c * coral_W[c]
        loss_B = wmmd(source, target, target_pred_p)
        loss_all = (1-mu) * loss/C + mu * loss_B
    else:
        print('imput length error!')
    return loss_all
def coral_wBDA_A_distance(source, source_label, target, target_pred_label, target_pred_p, coral_W):
    """Class-weighted BDA loss with the conditional/marginal trade-off ``mu``
    estimated from proxy A-distances (``mu = 1 - dm / (dm + dc)``).

    ``dc`` accumulates per-class A-distances, ``dm`` is the marginal
    A-distance over the full batches; the result is scaled by 1.6
    (empirical factor — TODO confirm rationale).  Returns 0 (after printing
    an error) on a label-length mismatch.

    Bug fixes vs. the original: ``loss_all`` is now initialised, so the
    mismatch path returns 0 instead of raising NameError; dead locals (the
    unused ``e`` vector, ``n``, ``length_pred_p``) are removed; the garbled
    error message is corrected.
    """
    loss = 0
    loss_all = 0  # fix: previously unbound when the length check failed
    dc = 0        # accumulated per-class (conditional) proxy A-distance
    length_source = int(source.size()[0])
    length_target = int(target.size()[0])
    length_pred_label = int(target_pred_label.size()[0])
    length_source_label = int(source_label.size()[0])
    if (length_source == length_source_label) & (length_pred_label == length_target):
        C = len(np.unique(source_label))
        if C != coral_W.size(0):
            # Weight vector does not match the class count: use uniform weights.
            coral_W = torch.ones(C)
        for c in range(C):
            Ns = len(source_label[np.where(source_label == c)])
            Nt = len(target_pred_label[np.where(target_pred_label == c)])
            if (Nt > 1) & (Ns > 1):
                inds_source = [item for item in np.where(source_label == c)]
                inds_target = [item for item in np.where(target_pred_label == c)]
                source_c = np.squeeze(source[inds_source, :])
                target_c = np.squeeze(target[inds_target, :])
                target_pred_c = target_pred_p[inds_target]
                loss_c = wmmd(source_c, target_c, target_pred_c)
                dc = dc + proxy_a_distance(source_c.cpu().detach().numpy(),
                                           target_c.cpu().detach().numpy())
                loss = loss + loss_c * coral_W[c]
        loss_B = wmmd(source, target, target_pred_p)
        dm = proxy_a_distance(source.cpu().detach().numpy(), target.cpu().detach().numpy())
        mu = 1 - dm / (dm + dc)
        loss_all = (1 - mu) * 1 / C * loss + mu * loss_B
        loss_all = 1.6 * loss_all
    else:
        print('input length error!')
    return loss_all
def aifa_wBDA_A_distance(source, source_label, target, target_pred_label, target_pred_p, coral_W):
    """A-distance-weighted BDA loss; see ``coral_wBDA_A_distance``.

    The original body was a line-for-line copy of
    ``coral_wBDA_A_distance`` (including its unbound-``loss_all`` error
    path); delegating removes the duplication while keeping the public
    name and signature for existing callers.
    """
    return coral_wBDA_A_distance(source, source_label, target,
                                 target_pred_label, target_pred_p, coral_W)
if __name__ == "__main__":
    # Smoke test: weighted MMD between two small random Gaussian batches.
    # Requires a CUDA device (tensors are moved with .cuda()).
    import numpy as np
    # NOTE(review): p1/p2/p, source_label and target_label are built but not
    # used below — presumably leftovers from earlier experiments.
    p1 = 1 * torch.ones(40)
    p2 = 0.5 * torch.ones(60)
    p = torch.cat([p1,p2])
    source_label = torch.tensor([1,1])
    target_label = torch.tensor([1,0,1])
    # Per-sample target weights for the 3 target rows.
    target_pred = torch.tensor([0.3,0.6,0.1]).cuda()
    data_1 = torch.tensor(np.random.normal(0, 10, (2, 50))).cuda()
    data_2 = torch.tensor(np.random.normal(5, 10, (3, 50))).cuda()
    loss_mmd = wmmd(data_1, data_2, target_pred)
| 36.625
| 115
| 0.561235
| 1,752
| 12,599
| 3.789384
| 0.079909
| 0.091881
| 0.056484
| 0.0241
| 0.803585
| 0.77105
| 0.754029
| 0.741527
| 0.732189
| 0.701009
| 0
| 0.020411
| 0.311691
| 12,599
| 343
| 116
| 36.731778
| 0.745157
| 0.004524
| 0
| 0.74359
| 0
| 0
| 0.009413
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029304
| false
| 0
| 0.025641
| 0
| 0.084249
| 0.018315
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4b4820fbe9a213a29610ac7f91fb7bf016a4e6c
| 9,019
|
py
|
Python
|
dbaas/workflow/steps/mysql/flipperfox_migration/acl_database_bind.py
|
jaeko44/python_dbaas
|
4fafa4ad70200fec1436c326c751761922ec9fa8
|
[
"BSD-3-Clause"
] | null | null | null |
dbaas/workflow/steps/mysql/flipperfox_migration/acl_database_bind.py
|
jaeko44/python_dbaas
|
4fafa4ad70200fec1436c326c751761922ec9fa8
|
[
"BSD-3-Clause"
] | null | null | null |
dbaas/workflow/steps/mysql/flipperfox_migration/acl_database_bind.py
|
jaeko44/python_dbaas
|
4fafa4ad70200fec1436c326c751761922ec9fa8
|
[
"BSD-3-Clause"
] | 1
|
2017-07-02T08:46:17.000Z
|
2017-07-02T08:46:17.000Z
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from util import get_credentials_for
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
from dbaas_credentials.models import CredentialType
from dbaas_cloudstack.models import DatabaseInfraAttr
from dbaas_aclapi.models import DatabaseInfraInstanceBind
from dbaas_aclapi.acl_base_client import AclClient
from dbaas_aclapi import helpers
from dbaas_aclapi.models import ERROR
LOG = logging.getLogger(__name__)
class BindNewInstances(BaseStep):
    """Workflow step that (re)creates ACL binds for the migration's new
    instances (those without a ``future_instance``) and the infra VIPs.

    ``do`` binds; ``undo`` removes the same binds.  Both read ``database``
    and ``databaseinfra`` from ``workflow_dict`` and report failures by
    appending DBAAS_0020 plus a traceback to ``workflow_dict['exceptions']``.
    """

    def __unicode__(self):
        return "Binding new instances ..."

    def do(self, workflow_dict):
        """Bind every existing ACL of the database against the new
        instances and VIPs; mark the bind as ERROR if the ACL API fails."""
        try:
            database = workflow_dict['database']
            databaseinfra = workflow_dict['databaseinfra']
            acl_credential = get_credentials_for(environment=database.environment,
                                                 credential_type=CredentialType.ACLAPI)
            acl_client = AclClient(acl_credential.endpoint,
                                   acl_credential.user,
                                   acl_credential.password,
                                   database.environment)
            # Only current instances: future_instance is null for them.
            instances = databaseinfra.instances.filter(
                future_instance__isnull=True)
            databaseinfra_vips = databaseinfra.vip_databaseinfra.all()
            # Addresses used later to mark matching bind rows on failure.
            instance_address_list = []
            for instance in instances:
                instance_address_list.append(instance.address)
            for vip in databaseinfra_vips:
                instance_address_list.append(vip.vip_ip)
            for database_bind in database.acl_binds.all():
                if helpers.bind_address(database_bind=database_bind,
                                        acl_client=acl_client,
                                        instances=instances,
                                        infra_attr_instances=[],
                                        infra_vips=databaseinfra_vips):
                    continue
                else:
                    # Bind failed: flag the bind and its per-instance rows,
                    # but keep processing the remaining binds.
                    LOG.error("The AclApi is not working properly.")
                    database_bind.bind_status = ERROR
                    database_bind.save()
                    DatabaseInfraInstanceBind.objects.filter(databaseinfra=databaseinfra,
                                                             bind_address=database_bind.bind_address,
                                                             instance__in=instance_address_list
                                                             ).update(bind_status=ERROR)
            return True
        except Exception:
            traceback = full_stack()
            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
            workflow_dict['exceptions']['traceback'].append(traceback)
            return False

    def undo(self, workflow_dict):
        """Remove the binds created by ``do`` (best effort per bind)."""
        LOG.info("Running undo...")
        try:
            database = workflow_dict['database']
            databaseinfra = workflow_dict['databaseinfra']
            acl_credential = get_credentials_for(environment=database.environment,
                                                 credential_type=CredentialType.ACLAPI)
            acl_client = AclClient(acl_credential.endpoint,
                                   acl_credential.user,
                                   acl_credential.password,
                                   database.environment)
            instances = databaseinfra.instances.filter(
                future_instance__isnull=True)
            databaseinfra_vips = databaseinfra.vip_databaseinfra.all()
            instance_address_list = []
            for instance in instances:
                instance_address_list.append(instance.address)
            for vip in databaseinfra_vips:
                instance_address_list.append(vip.vip_ip)
            for database_bind in database.acl_binds.all():
                infra_instances_binds = DatabaseInfraInstanceBind.objects.filter(
                    databaseinfra=databaseinfra,
                    instance__in=instance_address_list,
                    bind_address=database_bind.bind_address)
                if helpers.unbind_address(
                        database_bind, acl_client, infra_instances_binds, False
                ):
                    continue
            return True
        except Exception:
            traceback = full_stack()
            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
            workflow_dict['exceptions']['traceback'].append(traceback)
            return False
class UnbindOldInstances(BaseStep):
    """Workflow step that removes ACL binds from the migration's old
    instances (those WITH a ``future_instance``) and their infra-attr IPs.

    ``do`` unbinds; ``undo`` re-creates the binds.  Error reporting matches
    BindNewInstances: DBAAS_0020 + traceback into
    ``workflow_dict['exceptions']``.
    """

    def __unicode__(self):
        return "Unbinding old instances ..."

    def do(self, workflow_dict):
        """Unbind every ACL of the database from the old instances and the
        equivalent infra-attr addresses (best effort per bind)."""
        try:
            database = workflow_dict['database']
            databaseinfra = workflow_dict['databaseinfra']
            acl_credential = get_credentials_for(environment=database.environment,
                                                 credential_type=CredentialType.ACLAPI)
            acl_client = AclClient(acl_credential.endpoint,
                                   acl_credential.user,
                                   acl_credential.password,
                                   database.environment)
            # Old instances: those that DO have a future_instance set.
            instances = databaseinfra.instances.filter(
                future_instance__isnull=False)
            databaseinfraattr_instances = DatabaseInfraAttr.objects.filter(databaseinfra=databaseinfra,
                                                                           equivalent_dbinfraattr__isnull=False)
            instance_address_list = []
            for instance in instances:
                instance_address_list.append(instance.address)
            for instance in databaseinfraattr_instances:
                instance_address_list.append(instance.ip)
            for database_bind in database.acl_binds.all():
                infra_instances_binds = DatabaseInfraInstanceBind.objects.filter(
                    databaseinfra=databaseinfra,
                    instance__in=instance_address_list,
                    bind_address=database_bind.bind_address)
                if helpers.unbind_address(
                        database_bind, acl_client, infra_instances_binds, False
                ):
                    continue
            return True
        except Exception:
            traceback = full_stack()
            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
            workflow_dict['exceptions']['traceback'].append(traceback)
            return False

    def undo(self, workflow_dict):
        """Re-create the binds removed by ``do``; flag failures as ERROR."""
        LOG.info("Running undo...")
        try:
            database = workflow_dict['database']
            databaseinfra = workflow_dict['databaseinfra']
            acl_credential = get_credentials_for(environment=database.environment,
                                                 credential_type=CredentialType.ACLAPI)
            acl_client = AclClient(acl_credential.endpoint,
                                   acl_credential.user,
                                   acl_credential.password,
                                   database.environment)
            instances = databaseinfra.instances.filter(
                future_instance__isnull=False)
            databaseinfraattr_instances = DatabaseInfraAttr.objects.filter(databaseinfra=databaseinfra,
                                                                           equivalent_dbinfraattr__isnull=False)
            instance_address_list = []
            for instance in instances:
                instance_address_list.append(instance.address)
            for instance in databaseinfraattr_instances:
                instance_address_list.append(instance.ip)
            for database_bind in database.acl_binds.all():
                if helpers.bind_address(database_bind=database_bind,
                                        acl_client=acl_client,
                                        instances=instances,
                                        infra_attr_instances=databaseinfraattr_instances,
                                        infra_vips=[]):
                    continue
                else:
                    # Re-bind failed: flag the bind and its per-instance rows.
                    LOG.error("The AclApi is not working properly.")
                    database_bind.bind_status = ERROR
                    database_bind.save()
                    DatabaseInfraInstanceBind.objects.filter(databaseinfra=databaseinfra,
                                                             bind_address=database_bind.bind_address,
                                                             instance__in=instance_address_list
                                                             ).update(bind_status=ERROR)
            return True
        except Exception:
            traceback = full_stack()
            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
            workflow_dict['exceptions']['traceback'].append(traceback)
            return False
| 41.948837
| 112
| 0.557157
| 730
| 9,019
| 6.583562
| 0.131507
| 0.049938
| 0.063254
| 0.041615
| 0.895963
| 0.873075
| 0.873075
| 0.873075
| 0.873075
| 0.873075
| 0
| 0.003776
| 0.383302
| 9,019
| 214
| 113
| 42.14486
| 0.860302
| 0.002328
| 0
| 0.879518
| 0
| 0
| 0.04402
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036145
| false
| 0.024096
| 0.066265
| 0.012048
| 0.174699
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be0a5ece4a96c29e5806b7110928eb7eed866a04
| 65,341
|
py
|
Python
|
airavata-api/airavata-client-sdks/airavata-python-sdk/airavata/service/profile/tenant/cpi/TenantProfileService.py
|
docquantum/airavata
|
4ec5fa0aab1b75ca1e98a16648c57cd8abdb4b9c
|
[
"ECL-2.0",
"Apache-2.0"
] | 74
|
2015-04-10T02:57:26.000Z
|
2022-02-28T16:10:03.000Z
|
airavata-api/airavata-client-sdks/airavata-python-sdk/airavata/service/profile/tenant/cpi/TenantProfileService.py
|
docquantum/airavata
|
4ec5fa0aab1b75ca1e98a16648c57cd8abdb4b9c
|
[
"ECL-2.0",
"Apache-2.0"
] | 126
|
2015-04-26T02:55:26.000Z
|
2022-02-16T22:43:28.000Z
|
airavata-api/airavata-client-sdks/airavata-python-sdk/airavata/service/profile/tenant/cpi/TenantProfileService.py
|
docquantum/airavata
|
4ec5fa0aab1b75ca1e98a16648c57cd8abdb4b9c
|
[
"ECL-2.0",
"Apache-2.0"
] | 163
|
2015-01-22T14:05:24.000Z
|
2022-03-17T12:24:34.000Z
|
#
# Autogenerated by Thrift Compiler (0.10.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
import sys
import airavata.base.api.BaseAPI
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
class Iface(airavata.base.api.BaseAPI.Iface):
    # Autogenerated by the Thrift Compiler (0.10.0): abstract service
    # interface for the TenantProfileService CPI.  Do NOT hand-edit the
    # method bodies — regenerate from the .thrift IDL instead.

    def addGateway(self, authzToken, gateway):
        """
        Return the airavataInternalGatewayId assigned to given gateway.

        Parameters:
         - authzToken
         - gateway

        """
        pass

    def updateGateway(self, authzToken, updatedGateway):
        """
        Parameters:
         - authzToken
         - updatedGateway

        """
        pass

    def getGateway(self, authzToken, airavataInternalGatewayId):
        """
        Parameters:
         - authzToken
         - airavataInternalGatewayId

        """
        pass

    def deleteGateway(self, authzToken, airavataInternalGatewayId, gatewayId):
        """
        Parameters:
         - authzToken
         - airavataInternalGatewayId
         - gatewayId

        """
        pass

    def getAllGateways(self, authzToken):
        """
        Parameters:
         - authzToken

        """
        pass

    def isGatewayExist(self, authzToken, gatewayId):
        """
        Parameters:
         - authzToken
         - gatewayId

        """
        pass

    def getAllGatewaysForUser(self, authzToken, requesterUsername):
        """
        Parameters:
         - authzToken
         - requesterUsername

        """
        pass
class Client(airavata.base.api.BaseAPI.Client, Iface):
    """
    Thrift-generated synchronous RPC client for the tenant profile service.

    Each operation X is split into send_X (serialize the request frame onto
    the output protocol and flush) and recv_X (read the matching reply and
    either return the result or raise the declared service exception).
    """

    def __init__(self, iprot, oprot=None):
        airavata.base.api.BaseAPI.Client.__init__(self, iprot, oprot)

    def addGateway(self, authzToken, gateway):
        """
        Return the airavataInternalGatewayId assigned to given gateway.

        Parameters:
         - authzToken
         - gateway
        """
        self.send_addGateway(authzToken, gateway)
        return self.recv_addGateway()

    def send_addGateway(self, authzToken, gateway):
        # Serialize the call frame: message header, args struct, then flush.
        self._oprot.writeMessageBegin('addGateway', TMessageType.CALL, self._seqid)
        args = addGateway_args()
        args.authzToken = authzToken
        args.gateway = gateway
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_addGateway(self):
        # Read the reply frame; protocol-level failures arrive as EXCEPTION
        # messages, service-level failures as tpe/ae fields in the result.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = addGateway_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.tpe is not None:
            raise result.tpe
        if result.ae is not None:
            raise result.ae
        raise TApplicationException(TApplicationException.MISSING_RESULT, "addGateway failed: unknown result")

    def updateGateway(self, authzToken, updatedGateway):
        """
        Update an existing gateway; returns a boolean success flag.

        Parameters:
         - authzToken
         - updatedGateway
        """
        self.send_updateGateway(authzToken, updatedGateway)
        return self.recv_updateGateway()

    def send_updateGateway(self, authzToken, updatedGateway):
        self._oprot.writeMessageBegin('updateGateway', TMessageType.CALL, self._seqid)
        args = updateGateway_args()
        args.authzToken = authzToken
        args.updatedGateway = updatedGateway
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_updateGateway(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = updateGateway_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.tpe is not None:
            raise result.tpe
        if result.ae is not None:
            raise result.ae
        raise TApplicationException(TApplicationException.MISSING_RESULT, "updateGateway failed: unknown result")

    def getGateway(self, authzToken, airavataInternalGatewayId):
        """
        Look up a gateway by its internal id; returns a Gateway struct.

        Parameters:
         - authzToken
         - airavataInternalGatewayId
        """
        self.send_getGateway(authzToken, airavataInternalGatewayId)
        return self.recv_getGateway()

    def send_getGateway(self, authzToken, airavataInternalGatewayId):
        self._oprot.writeMessageBegin('getGateway', TMessageType.CALL, self._seqid)
        args = getGateway_args()
        args.authzToken = authzToken
        args.airavataInternalGatewayId = airavataInternalGatewayId
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_getGateway(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = getGateway_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.tpe is not None:
            raise result.tpe
        if result.ae is not None:
            raise result.ae
        raise TApplicationException(TApplicationException.MISSING_RESULT, "getGateway failed: unknown result")

    def deleteGateway(self, authzToken, airavataInternalGatewayId, gatewayId):
        """
        Delete a gateway; returns a boolean success flag.

        Parameters:
         - authzToken
         - airavataInternalGatewayId
         - gatewayId
        """
        self.send_deleteGateway(authzToken, airavataInternalGatewayId, gatewayId)
        return self.recv_deleteGateway()

    def send_deleteGateway(self, authzToken, airavataInternalGatewayId, gatewayId):
        self._oprot.writeMessageBegin('deleteGateway', TMessageType.CALL, self._seqid)
        args = deleteGateway_args()
        args.authzToken = authzToken
        args.airavataInternalGatewayId = airavataInternalGatewayId
        args.gatewayId = gatewayId
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_deleteGateway(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = deleteGateway_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.tpe is not None:
            raise result.tpe
        if result.ae is not None:
            raise result.ae
        raise TApplicationException(TApplicationException.MISSING_RESULT, "deleteGateway failed: unknown result")

    def getAllGateways(self, authzToken):
        """
        List all gateways visible to the caller.

        Parameters:
         - authzToken
        """
        self.send_getAllGateways(authzToken)
        return self.recv_getAllGateways()

    def send_getAllGateways(self, authzToken):
        self._oprot.writeMessageBegin('getAllGateways', TMessageType.CALL, self._seqid)
        args = getAllGateways_args()
        args.authzToken = authzToken
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_getAllGateways(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = getAllGateways_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.tpe is not None:
            raise result.tpe
        if result.ae is not None:
            raise result.ae
        raise TApplicationException(TApplicationException.MISSING_RESULT, "getAllGateways failed: unknown result")

    def isGatewayExist(self, authzToken, gatewayId):
        """
        Check whether a gateway with the given id exists; returns a boolean.

        Parameters:
         - authzToken
         - gatewayId
        """
        self.send_isGatewayExist(authzToken, gatewayId)
        return self.recv_isGatewayExist()

    def send_isGatewayExist(self, authzToken, gatewayId):
        self._oprot.writeMessageBegin('isGatewayExist', TMessageType.CALL, self._seqid)
        args = isGatewayExist_args()
        args.authzToken = authzToken
        args.gatewayId = gatewayId
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_isGatewayExist(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = isGatewayExist_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.tpe is not None:
            raise result.tpe
        if result.ae is not None:
            raise result.ae
        raise TApplicationException(TApplicationException.MISSING_RESULT, "isGatewayExist failed: unknown result")

    def getAllGatewaysForUser(self, authzToken, requesterUsername):
        """
        List all gateways associated with the given user.

        Parameters:
         - authzToken
         - requesterUsername
        """
        self.send_getAllGatewaysForUser(authzToken, requesterUsername)
        return self.recv_getAllGatewaysForUser()

    def send_getAllGatewaysForUser(self, authzToken, requesterUsername):
        self._oprot.writeMessageBegin('getAllGatewaysForUser', TMessageType.CALL, self._seqid)
        args = getAllGatewaysForUser_args()
        args.authzToken = authzToken
        args.requesterUsername = requesterUsername
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_getAllGatewaysForUser(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = getAllGatewaysForUser_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.tpe is not None:
            raise result.tpe
        if result.ae is not None:
            raise result.ae
        raise TApplicationException(TApplicationException.MISSING_RESULT, "getAllGatewaysForUser failed: unknown result")
class Processor(airavata.base.api.BaseAPI.Processor, Iface, TProcessor):
    """
    Thrift-generated server-side dispatcher for the tenant profile service.

    Maps incoming message names to process_* methods, each of which
    deserializes the args struct, invokes the wrapped handler, and writes
    the reply (or a declared service exception) back onto the output
    protocol. Undeclared exceptions are logged and reported to the caller
    as a generic INTERNAL_ERROR.
    """

    def __init__(self, handler):
        airavata.base.api.BaseAPI.Processor.__init__(self, handler)
        self._processMap["addGateway"] = Processor.process_addGateway
        self._processMap["updateGateway"] = Processor.process_updateGateway
        self._processMap["getGateway"] = Processor.process_getGateway
        self._processMap["deleteGateway"] = Processor.process_deleteGateway
        self._processMap["getAllGateways"] = Processor.process_getAllGateways
        self._processMap["isGatewayExist"] = Processor.process_isGatewayExist
        self._processMap["getAllGatewaysForUser"] = Processor.process_getAllGatewaysForUser

    def process(self, iprot, oprot):
        # Read one message and dispatch it by name via the process map.
        (name, type, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            # Unknown method: discard the payload and answer with an
            # UNKNOWN_METHOD exception frame.
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True

    def process_addGateway(self, seqid, iprot, oprot):
        args = addGateway_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = addGateway_result()
        try:
            result.success = self._handler.addGateway(args.authzToken, args.gateway)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            # Transport failures and shutdown signals propagate unchanged.
            raise
        except airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException as tpe:
            # Declared service exception: shipped back inside the reply.
            msg_type = TMessageType.REPLY
            result.tpe = tpe
        except airavata.api.error.ttypes.AuthorizationException as ae:
            msg_type = TMessageType.REPLY
            result.ae = ae
        except Exception as ex:
            # Undeclared failure: log and surface as a generic internal error.
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("addGateway", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_updateGateway(self, seqid, iprot, oprot):
        args = updateGateway_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = updateGateway_result()
        try:
            result.success = self._handler.updateGateway(args.authzToken, args.updatedGateway)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException as tpe:
            msg_type = TMessageType.REPLY
            result.tpe = tpe
        except airavata.api.error.ttypes.AuthorizationException as ae:
            msg_type = TMessageType.REPLY
            result.ae = ae
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("updateGateway", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_getGateway(self, seqid, iprot, oprot):
        args = getGateway_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = getGateway_result()
        try:
            result.success = self._handler.getGateway(args.authzToken, args.airavataInternalGatewayId)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException as tpe:
            msg_type = TMessageType.REPLY
            result.tpe = tpe
        except airavata.api.error.ttypes.AuthorizationException as ae:
            msg_type = TMessageType.REPLY
            result.ae = ae
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("getGateway", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_deleteGateway(self, seqid, iprot, oprot):
        args = deleteGateway_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = deleteGateway_result()
        try:
            result.success = self._handler.deleteGateway(args.authzToken, args.airavataInternalGatewayId, args.gatewayId)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException as tpe:
            msg_type = TMessageType.REPLY
            result.tpe = tpe
        except airavata.api.error.ttypes.AuthorizationException as ae:
            msg_type = TMessageType.REPLY
            result.ae = ae
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("deleteGateway", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_getAllGateways(self, seqid, iprot, oprot):
        args = getAllGateways_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = getAllGateways_result()
        try:
            result.success = self._handler.getAllGateways(args.authzToken)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException as tpe:
            msg_type = TMessageType.REPLY
            result.tpe = tpe
        except airavata.api.error.ttypes.AuthorizationException as ae:
            msg_type = TMessageType.REPLY
            result.ae = ae
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("getAllGateways", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_isGatewayExist(self, seqid, iprot, oprot):
        args = isGatewayExist_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = isGatewayExist_result()
        try:
            result.success = self._handler.isGatewayExist(args.authzToken, args.gatewayId)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException as tpe:
            msg_type = TMessageType.REPLY
            result.tpe = tpe
        except airavata.api.error.ttypes.AuthorizationException as ae:
            msg_type = TMessageType.REPLY
            result.ae = ae
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("isGatewayExist", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_getAllGatewaysForUser(self, seqid, iprot, oprot):
        args = getAllGatewaysForUser_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = getAllGatewaysForUser_result()
        try:
            result.success = self._handler.getAllGatewaysForUser(args.authzToken, args.requesterUsername)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException as tpe:
            msg_type = TMessageType.REPLY
            result.tpe = tpe
        except airavata.api.error.ttypes.AuthorizationException as ae:
            msg_type = TMessageType.REPLY
            result.ae = ae
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("getAllGatewaysForUser", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class addGateway_args(object):
    """
    Argument struct for the addGateway RPC.

    Attributes:
     - authzToken: required AuthzToken struct (field 1).
     - gateway: required Gateway struct to add (field 2).
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'authzToken', (airavata.model.security.ttypes.AuthzToken, airavata.model.security.ttypes.AuthzToken.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'gateway', (airavata.model.workspace.ttypes.Gateway, airavata.model.workspace.ttypes.Gateway.thrift_spec), None, ),  # 2
    )

    def __init__(self, authzToken=None, gateway=None,):
        self.authzToken = authzToken
        self.gateway = gateway

    def read(self, iprot):
        # Fast path: C-accelerated decoder when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.authzToken = airavata.model.security.ttypes.AuthzToken()
                    self.authzToken.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.gateway = airavata.model.workspace.ttypes.Gateway()
                    self.gateway.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('addGateway_args')
        if self.authzToken is not None:
            oprot.writeFieldBegin('authzToken', TType.STRUCT, 1)
            self.authzToken.write(oprot)
            oprot.writeFieldEnd()
        if self.gateway is not None:
            oprot.writeFieldBegin('gateway', TType.STRUCT, 2)
            self.gateway.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Both fields are required by the service IDL.
        if self.authzToken is None:
            raise TProtocolException(message='Required field authzToken is unset!')
        if self.gateway is None:
            raise TProtocolException(message='Required field gateway is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class addGateway_result(object):
    """
    Result struct for the addGateway RPC.

    Attributes:
     - success: string airavataInternalGatewayId on success (field 0).
     - tpe: TenantProfileServiceException raised by the handler (field 1).
     - ae: AuthorizationException raised by the handler (field 2).
    """

    thrift_spec = (
        (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
        (1, TType.STRUCT, 'tpe', (airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException, airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ae', (airavata.api.error.ttypes.AuthorizationException, airavata.api.error.ttypes.AuthorizationException.thrift_spec), None, ),  # 2
    )

    def __init__(self, success=None, tpe=None, ae=None,):
        self.success = success
        self.tpe = tpe
        self.ae = ae

    def read(self, iprot):
        # Fast path: C-accelerated decoder when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    # Python 2 delivers bytes; decode to unicode there only.
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.tpe = airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException()
                    self.tpe.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ae = airavata.api.error.ttypes.AuthorizationException()
                    self.ae.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('addGateway_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        if self.tpe is not None:
            oprot.writeFieldBegin('tpe', TType.STRUCT, 1)
            self.tpe.write(oprot)
            oprot.writeFieldEnd()
        if self.ae is not None:
            oprot.writeFieldBegin('ae', TType.STRUCT, 2)
            self.ae.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All result fields are optional; nothing to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class updateGateway_args(object):
    """
    Argument struct for the updateGateway RPC.

    Attributes:
     - authzToken: required AuthzToken struct (field 1).
     - updatedGateway: required Gateway struct with new values (field 2).
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'authzToken', (airavata.model.security.ttypes.AuthzToken, airavata.model.security.ttypes.AuthzToken.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'updatedGateway', (airavata.model.workspace.ttypes.Gateway, airavata.model.workspace.ttypes.Gateway.thrift_spec), None, ),  # 2
    )

    def __init__(self, authzToken=None, updatedGateway=None,):
        self.authzToken = authzToken
        self.updatedGateway = updatedGateway

    def read(self, iprot):
        # Fast path: C-accelerated decoder when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.authzToken = airavata.model.security.ttypes.AuthzToken()
                    self.authzToken.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.updatedGateway = airavata.model.workspace.ttypes.Gateway()
                    self.updatedGateway.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('updateGateway_args')
        if self.authzToken is not None:
            oprot.writeFieldBegin('authzToken', TType.STRUCT, 1)
            self.authzToken.write(oprot)
            oprot.writeFieldEnd()
        if self.updatedGateway is not None:
            oprot.writeFieldBegin('updatedGateway', TType.STRUCT, 2)
            self.updatedGateway.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Both fields are required by the service IDL.
        if self.authzToken is None:
            raise TProtocolException(message='Required field authzToken is unset!')
        if self.updatedGateway is None:
            raise TProtocolException(message='Required field updatedGateway is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class updateGateway_result(object):
    """
    Result struct for the updateGateway RPC.

    Attributes:
     - success: boolean success flag (field 0).
     - tpe: TenantProfileServiceException raised by the handler (field 1).
     - ae: AuthorizationException raised by the handler (field 2).
    """

    thrift_spec = (
        (0, TType.BOOL, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'tpe', (airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException, airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ae', (airavata.api.error.ttypes.AuthorizationException, airavata.api.error.ttypes.AuthorizationException.thrift_spec), None, ),  # 2
    )

    def __init__(self, success=None, tpe=None, ae=None,):
        self.success = success
        self.tpe = tpe
        self.ae = ae

    def read(self, iprot):
        # Fast path: C-accelerated decoder when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.tpe = airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException()
                    self.tpe.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ae = airavata.api.error.ttypes.AuthorizationException()
                    self.ae.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('updateGateway_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.tpe is not None:
            oprot.writeFieldBegin('tpe', TType.STRUCT, 1)
            self.tpe.write(oprot)
            oprot.writeFieldEnd()
        if self.ae is not None:
            oprot.writeFieldBegin('ae', TType.STRUCT, 2)
            self.ae.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All result fields are optional; nothing to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getGateway_args(object):
    """
    Argument struct for the getGateway RPC.

    Attributes:
     - authzToken: required AuthzToken struct (field 1).
     - airavataInternalGatewayId: required internal gateway id string (field 2).
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'authzToken', (airavata.model.security.ttypes.AuthzToken, airavata.model.security.ttypes.AuthzToken.thrift_spec), None, ),  # 1
        (2, TType.STRING, 'airavataInternalGatewayId', 'UTF8', None, ),  # 2
    )

    def __init__(self, authzToken=None, airavataInternalGatewayId=None,):
        self.authzToken = authzToken
        self.airavataInternalGatewayId = airavataInternalGatewayId

    def read(self, iprot):
        # Fast path: C-accelerated decoder when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.authzToken = airavata.model.security.ttypes.AuthzToken()
                    self.authzToken.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    # Python 2 delivers bytes; decode to unicode there only.
                    self.airavataInternalGatewayId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getGateway_args')
        if self.authzToken is not None:
            oprot.writeFieldBegin('authzToken', TType.STRUCT, 1)
            self.authzToken.write(oprot)
            oprot.writeFieldEnd()
        if self.airavataInternalGatewayId is not None:
            oprot.writeFieldBegin('airavataInternalGatewayId', TType.STRING, 2)
            oprot.writeString(self.airavataInternalGatewayId.encode('utf-8') if sys.version_info[0] == 2 else self.airavataInternalGatewayId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Both fields are required by the service IDL.
        if self.authzToken is None:
            raise TProtocolException(message='Required field authzToken is unset!')
        if self.airavataInternalGatewayId is None:
            raise TProtocolException(message='Required field airavataInternalGatewayId is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getGateway_result(object):
    """
    Result struct for the getGateway RPC.

    Attributes:
     - success: Gateway struct on success (field 0).
     - tpe: TenantProfileServiceException raised by the handler (field 1).
     - ae: AuthorizationException raised by the handler (field 2).
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (airavata.model.workspace.ttypes.Gateway, airavata.model.workspace.ttypes.Gateway.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'tpe', (airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException, airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ae', (airavata.api.error.ttypes.AuthorizationException, airavata.api.error.ttypes.AuthorizationException.thrift_spec), None, ),  # 2
    )

    def __init__(self, success=None, tpe=None, ae=None,):
        self.success = success
        self.tpe = tpe
        self.ae = ae

    def read(self, iprot):
        # Fast path: C-accelerated decoder when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = airavata.model.workspace.ttypes.Gateway()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.tpe = airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException()
                    self.tpe.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ae = airavata.api.error.ttypes.AuthorizationException()
                    self.ae.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getGateway_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.tpe is not None:
            oprot.writeFieldBegin('tpe', TType.STRUCT, 1)
            self.tpe.write(oprot)
            oprot.writeFieldEnd()
        if self.ae is not None:
            oprot.writeFieldBegin('ae', TType.STRUCT, 2)
            self.ae.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All result fields are optional; nothing to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class deleteGateway_args(object):
    """
    Argument struct for the deleteGateway RPC.

    Attributes:
     - authzToken: required AuthzToken struct (field 1).
     - airavataInternalGatewayId: required internal gateway id string (field 2).
     - gatewayId: required public gateway id string (field 3).
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'authzToken', (airavata.model.security.ttypes.AuthzToken, airavata.model.security.ttypes.AuthzToken.thrift_spec), None, ),  # 1
        (2, TType.STRING, 'airavataInternalGatewayId', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'gatewayId', 'UTF8', None, ),  # 3
    )

    def __init__(self, authzToken=None, airavataInternalGatewayId=None, gatewayId=None,):
        self.authzToken = authzToken
        self.airavataInternalGatewayId = airavataInternalGatewayId
        self.gatewayId = gatewayId

    def read(self, iprot):
        # Fast path: C-accelerated decoder when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.authzToken = airavata.model.security.ttypes.AuthzToken()
                    self.authzToken.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    # Python 2 delivers bytes; decode to unicode there only.
                    self.airavataInternalGatewayId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.gatewayId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('deleteGateway_args')
        if self.authzToken is not None:
            oprot.writeFieldBegin('authzToken', TType.STRUCT, 1)
            self.authzToken.write(oprot)
            oprot.writeFieldEnd()
        if self.airavataInternalGatewayId is not None:
            oprot.writeFieldBegin('airavataInternalGatewayId', TType.STRING, 2)
            oprot.writeString(self.airavataInternalGatewayId.encode('utf-8') if sys.version_info[0] == 2 else self.airavataInternalGatewayId)
            oprot.writeFieldEnd()
        if self.gatewayId is not None:
            oprot.writeFieldBegin('gatewayId', TType.STRING, 3)
            oprot.writeString(self.gatewayId.encode('utf-8') if sys.version_info[0] == 2 else self.gatewayId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All three fields are required by the service IDL.
        if self.authzToken is None:
            raise TProtocolException(message='Required field authzToken is unset!')
        if self.airavataInternalGatewayId is None:
            raise TProtocolException(message='Required field airavataInternalGatewayId is unset!')
        if self.gatewayId is None:
            raise TProtocolException(message='Required field gatewayId is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class deleteGateway_result(object):
    """
    Result struct for the deleteGateway RPC.

    Attributes:
     - success: boolean success flag (field 0).
     - tpe: TenantProfileServiceException raised by the handler (field 1).
     - ae: AuthorizationException raised by the handler (field 2).
    """

    thrift_spec = (
        (0, TType.BOOL, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'tpe', (airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException, airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ae', (airavata.api.error.ttypes.AuthorizationException, airavata.api.error.ttypes.AuthorizationException.thrift_spec), None, ),  # 2
    )

    def __init__(self, success=None, tpe=None, ae=None,):
        self.success = success
        self.tpe = tpe
        self.ae = ae

    def read(self, iprot):
        # Fast path: C-accelerated decoder when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.tpe = airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException()
                    self.tpe.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ae = airavata.api.error.ttypes.AuthorizationException()
                    self.ae.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('deleteGateway_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.tpe is not None:
            oprot.writeFieldBegin('tpe', TType.STRUCT, 1)
            self.tpe.write(oprot)
            oprot.writeFieldEnd()
        if self.ae is not None:
            oprot.writeFieldBegin('ae', TType.STRUCT, 2)
            self.ae.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All result fields are optional; nothing to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getAllGateways_args(object):
    """
    Thrift-generated argument struct for the getAllGateways RPC.

    Attributes:
     - authzToken: required AuthzToken authenticating the caller
    """

    # Field metadata indexed by field id; entry 0 is unused.
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'authzToken', (airavata.model.security.ttypes.AuthzToken, airavata.model.security.ttypes.AuthzToken.thrift_spec), None, ),  # 1
    )

    def __init__(self, authzToken=None,):
        self.authzToken = authzToken

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.authzToken = airavata.model.security.ttypes.AuthzToken()
                    self.authzToken.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getAllGateways_args')
        if self.authzToken is not None:
            oprot.writeFieldBegin('authzToken', TType.STRUCT, 1)
            self.authzToken.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required authzToken is unset."""
        if self.authzToken is None:
            raise TProtocolException(message='Required field authzToken is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getAllGateways_result(object):
    """
    Thrift-generated result struct for the getAllGateways RPC.

    Attributes:
     - success: list of Gateway structs returned by the service
     - tpe: TenantProfileServiceException thrown by the service, if any
     - ae: AuthorizationException thrown by the service, if any
    """

    # Field metadata; field 0 is a LIST of Gateway structs.
    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRUCT, (airavata.model.workspace.ttypes.Gateway, airavata.model.workspace.ttypes.Gateway.thrift_spec), False), None, ),  # 0
        (1, TType.STRUCT, 'tpe', (airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException, airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ae', (airavata.api.error.ttypes.AuthorizationException, airavata.api.error.ttypes.AuthorizationException.thrift_spec), None, ),  # 2
    )

    def __init__(self, success=None, tpe=None, ae=None,):
        self.success = success
        self.tpe = tpe
        self.ae = ae

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    # Read the list of Gateway structs element by element.
                    self.success = []
                    (_etype3, _size0) = iprot.readListBegin()
                    for _i4 in range(_size0):
                        _elem5 = airavata.model.workspace.ttypes.Gateway()
                        _elem5.read(iprot)
                        self.success.append(_elem5)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.tpe = airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException()
                    self.tpe.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ae = airavata.api.error.ttypes.AuthorizationException()
                    self.ae.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol (unset fields are omitted)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getAllGateways_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for iter6 in self.success:
                iter6.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.tpe is not None:
            oprot.writeFieldBegin('tpe', TType.STRUCT, 1)
            self.tpe.write(oprot)
            oprot.writeFieldEnd()
        if self.ae is not None:
            oprot.writeFieldBegin('ae', TType.STRUCT, 2)
            self.ae.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Result structs have no required fields; nothing to check."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class isGatewayExist_args(object):
    """
    Thrift-generated argument struct for the isGatewayExist RPC.

    Attributes:
     - authzToken: required AuthzToken authenticating the caller
     - gatewayId: required gateway identifier to look up
    """

    # Field metadata indexed by field id; entry 0 is unused.
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'authzToken', (airavata.model.security.ttypes.AuthzToken, airavata.model.security.ttypes.AuthzToken.thrift_spec), None, ),  # 1
        (2, TType.STRING, 'gatewayId', 'UTF8', None, ),  # 2
    )

    def __init__(self, authzToken=None, gatewayId=None,):
        self.authzToken = authzToken
        self.gatewayId = gatewayId

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.authzToken = airavata.model.security.ttypes.AuthzToken()
                    self.authzToken.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    # Py2 returns bytes from readString; decode to unicode there only.
                    self.gatewayId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('isGatewayExist_args')
        if self.authzToken is not None:
            oprot.writeFieldBegin('authzToken', TType.STRUCT, 1)
            self.authzToken.write(oprot)
            oprot.writeFieldEnd()
        if self.gatewayId is not None:
            oprot.writeFieldBegin('gatewayId', TType.STRING, 2)
            oprot.writeString(self.gatewayId.encode('utf-8') if sys.version_info[0] == 2 else self.gatewayId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset."""
        if self.authzToken is None:
            raise TProtocolException(message='Required field authzToken is unset!')
        if self.gatewayId is None:
            raise TProtocolException(message='Required field gatewayId is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class isGatewayExist_result(object):
    """
    Thrift-generated result struct for the isGatewayExist RPC.

    Attributes:
     - success: True if the gateway exists
     - tpe: TenantProfileServiceException thrown by the service, if any
     - ae: AuthorizationException thrown by the service, if any
    """

    # Field metadata indexed by field id.
    thrift_spec = (
        (0, TType.BOOL, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'tpe', (airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException, airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ae', (airavata.api.error.ttypes.AuthorizationException, airavata.api.error.ttypes.AuthorizationException.thrift_spec), None, ),  # 2
    )

    def __init__(self, success=None, tpe=None, ae=None,):
        self.success = success
        self.tpe = tpe
        self.ae = ae

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.tpe = airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException()
                    self.tpe.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ae = airavata.api.error.ttypes.AuthorizationException()
                    self.ae.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol (unset fields are omitted)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('isGatewayExist_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.tpe is not None:
            oprot.writeFieldBegin('tpe', TType.STRUCT, 1)
            self.tpe.write(oprot)
            oprot.writeFieldEnd()
        if self.ae is not None:
            oprot.writeFieldBegin('ae', TType.STRUCT, 2)
            self.ae.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Result structs have no required fields; nothing to check."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getAllGatewaysForUser_args(object):
    """
    Thrift-generated argument struct for the getAllGatewaysForUser RPC.

    Attributes:
     - authzToken: required AuthzToken authenticating the caller
     - requesterUsername: required username whose gateways are requested
    """

    # Field metadata indexed by field id; entry 0 is unused.
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'authzToken', (airavata.model.security.ttypes.AuthzToken, airavata.model.security.ttypes.AuthzToken.thrift_spec), None, ),  # 1
        (2, TType.STRING, 'requesterUsername', 'UTF8', None, ),  # 2
    )

    def __init__(self, authzToken=None, requesterUsername=None,):
        self.authzToken = authzToken
        self.requesterUsername = requesterUsername

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.authzToken = airavata.model.security.ttypes.AuthzToken()
                    self.authzToken.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    # Py2 returns bytes from readString; decode to unicode there only.
                    self.requesterUsername = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getAllGatewaysForUser_args')
        if self.authzToken is not None:
            oprot.writeFieldBegin('authzToken', TType.STRUCT, 1)
            self.authzToken.write(oprot)
            oprot.writeFieldEnd()
        if self.requesterUsername is not None:
            oprot.writeFieldBegin('requesterUsername', TType.STRING, 2)
            oprot.writeString(self.requesterUsername.encode('utf-8') if sys.version_info[0] == 2 else self.requesterUsername)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset."""
        if self.authzToken is None:
            raise TProtocolException(message='Required field authzToken is unset!')
        if self.requesterUsername is None:
            raise TProtocolException(message='Required field requesterUsername is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getAllGatewaysForUser_result(object):
    """
    Thrift-generated result struct for the getAllGatewaysForUser RPC.

    Attributes:
     - success: list of Gateway structs returned by the service
     - tpe: TenantProfileServiceException thrown by the service, if any
     - ae: AuthorizationException thrown by the service, if any
    """

    # Field metadata; field 0 is a LIST of Gateway structs.
    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRUCT, (airavata.model.workspace.ttypes.Gateway, airavata.model.workspace.ttypes.Gateway.thrift_spec), False), None, ),  # 0
        (1, TType.STRUCT, 'tpe', (airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException, airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ae', (airavata.api.error.ttypes.AuthorizationException, airavata.api.error.ttypes.AuthorizationException.thrift_spec), None, ),  # 2
    )

    def __init__(self, success=None, tpe=None, ae=None,):
        self.success = success
        self.tpe = tpe
        self.ae = ae

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    # Read the list of Gateway structs element by element.
                    self.success = []
                    (_etype10, _size7) = iprot.readListBegin()
                    for _i11 in range(_size7):
                        _elem12 = airavata.model.workspace.ttypes.Gateway()
                        _elem12.read(iprot)
                        self.success.append(_elem12)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.tpe = airavata.service.profile.tenant.cpi.error.ttypes.TenantProfileServiceException()
                    self.tpe.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ae = airavata.api.error.ttypes.AuthorizationException()
                    self.ae.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol (unset fields are omitted)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getAllGatewaysForUser_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for iter13 in self.success:
                iter13.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.tpe is not None:
            oprot.writeFieldBegin('tpe', TType.STRUCT, 1)
            self.tpe.write(oprot)
            oprot.writeFieldEnd()
        if self.ae is not None:
            oprot.writeFieldBegin('ae', TType.STRUCT, 2)
            self.ae.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Result structs have no required fields; nothing to check."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| 38.390717
| 220
| 0.603327
| 6,565
| 65,341
| 5.833054
| 0.031531
| 0.014624
| 0.026323
| 0.023032
| 0.881026
| 0.849898
| 0.835562
| 0.815324
| 0.790646
| 0.790646
| 0
| 0.004746
| 0.29698
| 65,341
| 1,701
| 221
| 38.413286
| 0.828892
| 0.024074
| 0
| 0.852833
| 1
| 0
| 0.035729
| 0.005254
| 0
| 0
| 0
| 0
| 0
| 1
| 0.100074
| false
| 0.005151
| 0.005887
| 0.025754
| 0.202355
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
076e3f3e40b888bde05dbc604bb2668511635ee7
| 1,135
|
py
|
Python
|
model_selection_cb/make_scripts.py
|
akshaykr/oracle_cb
|
68f10fce5eca8ebe3f57fd5a56a0ef8d82537ab4
|
[
"MIT"
] | 26
|
2017-08-02T19:58:06.000Z
|
2021-11-03T06:31:01.000Z
|
model_selection_cb/make_scripts.py
|
akshaykr/oracle_cb
|
68f10fce5eca8ebe3f57fd5a56a0ef8d82537ab4
|
[
"MIT"
] | 1
|
2020-03-03T06:06:32.000Z
|
2020-03-03T06:06:32.000Z
|
model_selection_cb/make_scripts.py
|
akshaykr/oracle_cb
|
68f10fce5eca8ebe3f57fd5a56a0ef8d82537ab4
|
[
"MIT"
] | 10
|
2017-06-02T19:34:38.000Z
|
2022-03-22T10:38:51.000Z
|
import numpy as np


def main():
    """Print a shell script sweeping LimeCB.py over a log-spaced parameter grid.

    Emits one ``cd ../`` line followed by, for each of the 10 grid points,
    four limecb/oracle runs (parameterized by the eps grid, over the linucb
    and minimonster base learners) and one plain linucb run (parameterized
    by the delta grid). Output is intended to be redirected to a shell.
    """
    T = 4000   # horizon
    d = 1000   # ambient dimension
    s = 10     # sparsity
    K = 2      # number of actions
    delta_vals = np.logspace(-3, 1, 10)  # linucb parameter grid
    eps_vals = np.logspace(-3, 1, 10)    # limecb/oracle parameter grid
    iters = 20  # replicates per configuration
    # Shared command template; kept identical to the historical output format.
    base_cmd = "python3 -W ignore LimeCB.py --T %d --d %d --s %d --K %d --iters %d --param %0.3f --alg %s --noise 1.0"
    print("cd ../")
    for delta, eps in zip(delta_vals, eps_vals):
        print((base_cmd + " --base linucb") % (T, d, s, K, iters, eps, 'limecb'))
        print((base_cmd + " --base linucb") % (T, d, s, K, iters, eps, 'oracle'))
        print((base_cmd + " --base minimonster") % (T, d, s, K, iters, eps, 'limecb'))
        print((base_cmd + " --base minimonster") % (T, d, s, K, iters, eps, 'oracle'))
        print(base_cmd % (T, d, s, K, iters, delta, 'linucb'))


if __name__ == '__main__':
    main()
| 51.590909
| 183
| 0.532159
| 213
| 1,135
| 2.760563
| 0.206573
| 0.034014
| 0.110544
| 0.161565
| 0.828231
| 0.812925
| 0.751701
| 0.751701
| 0.751701
| 0.751701
| 0
| 0.052995
| 0.235242
| 1,135
| 21
| 184
| 54.047619
| 0.624424
| 0
| 0
| 0
| 0
| 0.3125
| 0.542328
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.0625
| 0
| 0.0625
| 0.375
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0789ba58e6a1adb60f102222e0f0451c03b4a69c
| 19,484
|
py
|
Python
|
elitist_es.py
|
LsTam91/elitist_constrained_es
|
3c707165b4462c736284c53e0afe2f58e49ed418
|
[
"MIT"
] | 1
|
2021-04-27T08:20:19.000Z
|
2021-04-27T08:20:19.000Z
|
elitist_es.py
|
LsTam91/elitist_constrained_es
|
3c707165b4462c736284c53e0afe2f58e49ed418
|
[
"MIT"
] | null | null | null |
elitist_es.py
|
LsTam91/elitist_constrained_es
|
3c707165b4462c736284c53e0afe2f58e49ed418
|
[
"MIT"
] | null | null | null |
import numpy as np
class CholeskyElitistES:
    """
    Implementation of the (1+1)-Cholesky-CMA-ES without constraint.
    It is the implementation presented in the article:
    'A Computational Efficient Covariance Matrix Update and a (1+1)CMA for
    Evolution Strategies'
    by C. Igel, T. Suttorp, and N. Hansen.

    Usage: alternate calls to ask() (sample a candidate) and tell()
    (report its objective value) until stop() returns True.
    """

    def __init__(self, x0, sigma0):
        # x0: initial search point (1-D array-like); sigma0: initial step size (> 0).
        self.x = x0
        self.sigma = sigma0
        self.dim = len(x0)
        self.A = np.eye(self.dim)  # Cholesky factor of the covariance matrix
        self.fct = [1e15]  # best-so-far objective history (large sentinel start)
        # Parameters for updateStepSize:
        self.p_target_succ = 2 / 11  # target succes rate
        self.p_succ = self.p_target_succ
        self.d = 1 + self.dim / 2  # controls the rate of the step size adaptation
        self.c_p = 1 / 12  # learning rate of the average success
        # Parameters for updateCholesky:
        self.p_thresh = 0.44
        self.c_cov = 2 / (self.dim**2 + 6)
        # Parameters for stopping criterium :
        self.tolsig = 1e-8
        self.stagnation = 0  # consecutive unsuccessful tell() calls
        self.best = []  # accepted (improving) objective values only
        self.TolX = 1e-12 * sigma0

    def ask(self):
        """
        Sample a candidate solution from x

        Side effect: stores the standard-normal draw in self.z, which the
        subsequent Cholesky update relies on.
        """
        self.z = np.random.normal(size=self.dim)
        return self.x + self.sigma * self.A.dot(self.z)

    def tell(self, x, f):
        """
        Update the ES internal model from x and its objective value f(x)
        """
        lbd = f <= self.fct[-1]  # success flag: candidate at least as good as incumbent
        self._updateStepSize(lbd)
        if lbd:
            self.x = x
            self.stagnation = 0
            self.fct.append(f)
            self.best.append(f)
            self._updateCholesky()
        else:
            # Keep the history aligned with the number of iterations.
            self.fct.append(self.fct[-1])
            self.stagnation += 1

    def _updateStepSize(self, lbd):
        """
        Update the value of the step size sigma and the averaged success rate,
        p_succ.
        """
        self.p_succ = (1 - self.c_p) * self.p_succ + self.c_p * lbd
        self.sigma *= np.exp(1/self.d * ((self.p_succ - self.p_target_succ)
                                         / (1 - self.p_target_succ)))

    def _updateCholesky(self):
        """
        Update of the cholesky matrix in order to change the search space for
        new candidates
        """
        # Rank-one update performed only while the success rate is small enough.
        if self.p_succ < self.p_thresh:
            c_a = np.sqrt(1 - self.c_cov)
            update_coef = c_a / np.linalg.norm(self.z)**2 \
                * (np.sqrt(1 + (1 - c_a**2) * np.linalg.norm(self.z)**2
                           / c_a**2) - 1)
            self.A = c_a * self.A + update_coef * self.A.dot(np.outer(self.z, self.z))

    def stop(self):
        """
        Stopping criteria

        Returns True (after printing which criterion fired) when one of the
        step-size/stagnation/function-value tolerances is met; otherwise
        returns None (falsy), so the optimization loop continues.
        """
        if self.sigma < self.tolsig:
            print("sigma")
            return True
        elif self.stagnation > 120 + 30 * self.dim:
            print("Stagnation crit")
            return True
        elif len(self.best) > 2 and self.best[-2] - self.best[-1] < 1e-12:
            print("TolFun crit")
            return True
        elif self.sigma * self.p_succ < self.TolX:
            print("TolX crit")
            return True
def fmin(f, x0, sigma0, plot=False):
    """
    Standard interface to unconstrained optimization.

    Runs a CholeskyElitistES ask/tell loop until a stopping criterion fires.
    Returns the strategy object; with plot=True also returns the recorded
    covariance eigenvalues, step sizes, diagonal variances and iterates.
    """
    strategy = CholeskyElitistES(x0, sigma0)
    eigenvalues, step_sizes, variances, iterates = [], [], [], []
    while not strategy.stop():
        candidate = strategy.ask()
        strategy.tell(candidate, f(candidate))
        # Record per-iteration traces for optional plotting.
        eigenvalues.append(np.linalg.eig(strategy.A.T.dot(strategy.A))[0])
        step_sizes.append(strategy.sigma)
        variances.append(np.diag(strategy.A.dot(strategy.A)))
        iterates.append(strategy.x)
    if plot:
        return strategy, eigenvalues, step_sizes, variances, iterates
    return strategy
class ActiveElitistES:
    """
    It is the implementation of the algorithm presented in the article:
    'A (1+1)-CMA-ES for Constrained Optimisation'
    by D. V. Arnold, and N. Hansen.

    Usage: ask() samples a candidate, test() updates the constraint model and
    reports feasibility, tell() updates the objective model. Constraints are
    of the form g(x) <= 0 componentwise.
    """

    def __init__(self, x0, sigma0,
                 tolsig=1e-10, tolfun=1e-9, TolX=1e-10
                 ):
        # x0: initial (assumed feasible) point; sigma0: initial step size;
        # tolsig/tolfun/TolX: stopping tolerances (cf. pycma naming).
        # Optimization variables
        self.x = x0
        self.sigma = sigma0
        self.dim = len(x0)
        self.A = np.eye(self.dim)  # Cholesky factor of the covariance matrix
        self.fct = [1e15]  # best-so-far objective history (sentinel start)
        self.z = np.ones(self.dim) * 1e-4  # last normal draw (non-zero init)
        self.v = np.array([])  # per-constraint exponentially fading records
        self.w = np.array([])  # per-constraint back-transformed records
        # Solver variables
        self.count_f = 0  # objective evaluations seen via tell()
        self.count_g = 0  # constraint evaluations seen via test()
        # Parameter settings
        self.d = 1 + self.dim / 2  # controls the rate of the step size adaptation
        self.c = 2 / (self.dim + 2)
        self.c_p = 1 / 12  # learning rate of the average success
        self.p_target = 2 / 11  # target succes rate
        self.c_cov_plus = 2 / (self.dim**2 + 6)
        self.c_c = 1 / (self.dim + 2)
        self.beta = 0.1 / (self.dim + 2)
        # Variable:
        self.p_succ = 2 / 11  # p_target
        self.fifth_order = np.ones(5) * np.inf  # last five objective values
        self.s = 0  # exponentially fading success record
        # Parameters for stopping criterion :
        self.tolsig = tolsig
        self.tolfun = tolfun
        self.stagnation = 0
        self.tolstagnation = 120 + 30 * self.dim
        self.best = []
        self.TolX = TolX * sigma0
        self.tolcountf = np.inf  # evaluation budgets (disabled by default)
        self.tolcountg = np.inf
        self.stop_now = False

    def ask(self):
        """
        Sample a candidate solution from x

        Side effect: stores the standard-normal draw in self.z, used by the
        constraint and covariance updates.
        """
        self.z = np.random.normal(size=self.dim)
        return self.x + self.sigma * self.A.dot(self.z)

    def tell(self, x_new, f):
        """
        Update the ES internal model from x and its objective value f(x)
        """
        lbd = f <= self.fct[-1]  # success flag
        self._updateStepSize(lbd)
        if lbd:
            self.x = x_new
            self.fct.append(f)
            self.best.append(f)
            self._updateCholesky()
            self.stagnation = 0
        else:
            self.fct.append(self.fct[-1])
            self.stagnation += 1
        # if len(self.fct) > 5 and sum(self.fifth_order > f) == 0:
        # Active (negative) update when the candidate is worse than the last
        # five objective values.
        if (not lbd) and self.p_succ < 0.44 and all(self.fifth_order < f):
            self._updateFifthOrder()
        self.fifth_order = np.concatenate([self.fifth_order[1:], [f]])
        self.count_f += 1

    def test(self, g):
        """
        If the solution isn't feasible we update the cholesky matrix, A and the
        exponentially fading record, v.

        g: iterable of constraint values for the last sampled candidate
        (g[j] > 0 means constraint j is violated). Returns True when the
        candidate is feasible.
        """
        m = len(g)
        feasible = True
        summ = 0
        # Init
        # Lazily size the fading records once the number of constraints is known.
        if self.v.shape == (0,):
            self.v = np.zeros((m, self.dim))
        if self.w.shape == (0,):
            self.w = np.zeros((m, self.dim))
        # We take the inverse of A only if the solution is infeasible
        if any(u > 0 for u in g):
            inv_A = np.linalg.inv(self.A)
            feasible = False
            for j in range(m):
                if g[j] > 0:
                    self.v[j] *= (1 - self.c_c)
                    self.v[j] += self.c_c * self.A.dot(self.z)
                    self.w[j] = inv_A.dot(self.v[j])
                    summ += np.outer(self.v[j], self.w[j]) / self.w[j].T.dot(self.w[j])
        if not feasible:
            # Shrink A in the directions of the violated constraints.
            self.A -= self.beta / np.sum([u > 0 for u in g]) * summ
            if np.isnan(self.A).any():
                print("ERROR: NaN values in the covariance matrix")
                print(f"After {self.count_g} constraint evaluations")
                print(f"summ value: {summ}")
                raise RuntimeError("NaN values in the covariance matrix")
        self.count_g += 1
        return feasible

    def _updateStepSize(self, lbd):
        """
        Update the value of the step size sigma and the averaged success rate,
        p_succ.
        """
        self.p_succ = (1 - self.c_p) * self.p_succ + self.c_p * lbd
        self.sigma *= np.exp(
            (self.p_succ - self.p_target) / ((1 - self.p_target) * self.d)
        )

    def _updateCholesky(self):
        """
        Update of the cholesky matrix and the exponentially fading record, s,
        in order to change the search space for new candidates.
        Rather than working with the covariance matrix and performing a
        Cholesky decompositionin every iteration of the algorithm, Igel et al.
        presented a direct update of A.
        """
        if self.p_succ < 0.44:
            self.s *= (1 - self.c)
            self.s += np.sqrt(self.c * (2 - self.c)) * self.A.dot(self.z)
            self.alpha = 1 - self.c_cov_plus
        else:
            self.s *= 1 - self.c
            self.alpha = 1 - self.c_cov_plus + self.c_cov_plus * self.c * (2 - self.c)
        u = np.linalg.inv(self.A).dot(self.s)
        u2 = np.linalg.norm(u)**2
        self.A *= np.sqrt(self.alpha)
        self.A += np.sqrt(self.alpha) * (np.sqrt(1 + self.c_cov_plus * u2
                                                 / (self.alpha)) - 1) * np.outer(self.s, u) / u2
        assert not np.isnan(self.A).any()

    def _updateFifthOrder(self):
        """
        In the case where the solution is worst than the fifth last, we
        incorporate the active covariance matrix update due to Jastrebski and
        Arnold.
        """
        self.c_cov_minus = np.min(
            [
                0.4 / (self.dim**(1.6) + 1),
                1 / abs(2 * np.linalg.norm(self.z)**2 - 1)
            ]
        )
        z2 = np.linalg.norm(self.z)**2
        self.A *= np.sqrt(1 + self.c_cov_minus)
        self.A += np.sqrt(1 + self.c_cov_minus) / z2 \
            * (np.sqrt(1 - self.c_cov_minus * z2 / (1 + self.c_cov_minus)) - 1) \
            * self.A.dot(np.outer(self.z, self.z))
        assert not np.isnan(self.A).any()

    def stop(self, inner=False):
        """
        Stopping criteria
        Set inner to true to test only for criteria related to the inner loop
        """
        if not inner:
            if self.sigma < self.tolsig:
                print("sigma")
                return True
            elif self.stagnation > self.tolstagnation:
                # Stagnation crit
                print("Stagnation crit")
                return True
            elif len(self.best) > 2 and self.best[-2] - self.best[-1] < self.tolfun:
                # TolFun crit
                print("TolFun crit")
                return True
            elif self.sigma * self.p_succ < self.TolX:
                # TolX crit
                print("TolX crit")
                return True
        if self.count_f >= self.tolcountf or self.count_g > self.tolcountg:
            print("Number of evals exceeded")
            return True
        return False
class FastActiveElitistES:
"""
It is the implementation of the algorithm presented in the article:
'A (1+1)-CMA-ES for Constrained Optimisation' by D. V. Arnold,
and N. Hansen. I use the fast update of the covariance matrix in order to
reduce the update complexity from O(n^3) to O(n^2) for the Cholesky
update and the fifth order update and from O(n^3+mn^2) to O(mn^2) for the
active constraint handling, which is a new update.
"""
def __init__(self, x0, sigma0,
tolsig=1e-10, tolfun=1e-9, TolX=1e-10
):
'''
x0 : Assert the starting point is in the feaseble space!
sigma0 : initial step size
tolsig, tolfun, TolX : stopping criteria,
check pycma documentation for more information.
'''
# Optimization variables
self.x = x0
self.sigma = sigma0
self.dim = len(x0)
self.A = np.eye(self.dim)
self.invA = np.eye(self.dim)
self.Abis = np.eye(self.dim)
self.invAbis = np.eye(self.dim)
self.fct = [1e15]
self.z = np.ones(self.dim) * 1e-4
self.v = np.array([])
self.w = np.array([])
# Solver variables
self.count_f = 0
self.count_g = 0
# Parameter settings
self.d = 1 + self.dim / 2 # controls the rate of the step size adaptation
self.c = 2 / (self.dim + 2)
self.c_p = 1 / 12 # learning rate of the average success
self.p_target = 2 / 11 # target succes rate
self.c_cov_plus = 2 / (self.dim**2 + 6)
self.c_c = 1 / (self.dim + 2)
self.beta = 0.1 / (self.dim + 2)
# Variable:
self.p_succ = 2 / 11 # p_target
self.fifth_order = np.ones(5) * np.inf
self.s = 0
# Parameters for stopping criterion :
self.tolsig = tolsig
self.tolfun = tolfun
self.stagnation = 0
self.tolstagnation = 120 + 30 * self.dim
self.best = []
self.TolX = TolX * sigma0
self.tolcountf = np.inf
self.tolcountg = np.inf
self.stop_now = False
def ask(self):
"""
Sample a candidate solution from x
"""
self.z = np.random.normal(size=self.dim)
return self.x + self.sigma * self.A.dot(self.z)
def tell(self, x_new, f):
"""
Update the ES internal model from x and its objective value f(x)
"""
lbd = f <= self.fct[-1]
self._updateStepSize(lbd)
if lbd:
self.x = x_new
self.fct.append(f)
self.best.append(f)
self._updateCholesky()
self.stagnation = 0
else:
self.fct.append(self.fct[-1])
self.stagnation += 1
if (not lbd) and self.p_succ < 0.44 and all(self.fifth_order < f):
self._updateFifthOrder()
self.fifth_order = np.concatenate([self.fifth_order[1:], [f]])
self.count_f += 1
def test(self, g):
"""
If the solution isn't feasible we update the cholesky matrix, A and the
exponentially fading record, v.
"""
m = len(g)
feasible = True
summ = 0
# Init
if self.v.shape == (0,):
self.v = np.zeros((m, self.dim))
if self.w.shape == (0,):
self.w = np.ones((m, self.dim))
for j in range(m):
if g[j] > 0:
self.v[j] *= (1 - self.c_c)
self.v[j] += self.c_c * self.A.dot(self.z)
self.w[j] = self.invA.dot(self.v[j])
summ += np.outer(self.v[j], self.w[j]) / self.w[j].T.dot(self.w[j])
feasible = False
m_a = sum(g > 0)
if not feasible:
self.A -= self.beta / m_a * summ
if np.isnan(self.A).any():
print("ERROR: NaN values in the covariance matrix")
print(f"After {self.count_g} constraint evaluations")
print(f"summ value: {summ}")
raise RuntimeError("NaN values in the covariance matrix")
if m_a > 0:
ind = g > 0
U = (- self.v[ind] * self.beta / m_a).T
V = np.array([self.w[j] / (self.w[j].T @ self.w[j]) for j in np.arange(m)[ind]])
if m_a == 1:
C = 1
self.invA -= (self.invA @ U) * 1/(C + V @ self.invA @ U)[0, 0] * (V @ self.invA)
else:
C = np.eye(m_a)
self.invA -= (self.invA @ U) @ np.linalg.inv(C + V @ self.invA @ U) @ (V @ self.invA)
self.count_g += 1
return feasible
def _updateStepSize(self, lbd):
"""
Update the value of the step size sigma and the averaged success rate,
p_succ.
"""
self.p_succ = (1 - self.c_p) * self.p_succ + self.c_p * lbd
self.sigma *= np.exp(
(self.p_succ - self.p_target) / ((1 - self.p_target) * self.d)
)
def _updateCholesky(self):
"""
Update of the cholesky matrix and the exponentially fading record, s,
in order to change the search space for new candidates.
Rather than working with the covariance matrix and performing a
Cholesky decompositionin every iteration of the algorithm, Igel et al.
presented a direct update of A.
"""
if self.p_succ < 0.44:
self.s *= 1 - self.c
self.s += np.sqrt(self.c * (2 - self.c)) * self.A.dot(self.z)
self.alpha = 1 - self.c_cov_plus
else:
self.s *= 1 - self.c
self.alpha = 1 - self.c_cov_plus + self.c_cov_plus * self.c * (2 - self.c)
u = self.invA.dot(self.s)
u2 = u.T @ u
self.A *= np.sqrt(self.alpha)
self.A += np.sqrt(self.alpha) / u2 * (np.sqrt(1 + self.c_cov_plus * u2
/ (self.alpha)) - 1) * np.outer(self.s, u)
A_temp = self.invA / np.sqrt(self.alpha)
if u2 != 0:
A_temp -= (1 - 1 / np.sqrt(1 + self.c_cov_plus * u2/self.alpha)
) / (np.sqrt(self.alpha) * u2) * np.outer(u, u.T.dot(self.invA))
self.invA = A_temp
assert not np.isnan(self.A).any()
def _updateFifthOrder(self):
"""
In the case where the solution is worst than the fifth last, we
incorporate the active covariance matrix update due to Jastrebski and
Arnold.
"""
z2 = self.z.T @ self.z
self.c_cov_minus = np.min(
[0.4 / (self.dim**(1.6) + 1),
1 / abs(2 * z2 - 1)])
a = np.sqrt(1 + self.c_cov_minus)
b = a / z2 * (np.sqrt(1 - self.c_cov_minus * z2 / (1+self.c_cov_minus)) - 1)
Az = self.A @ self.z
self.A *= a
self.A += b * np.outer(Az, self.z)
self.invA = self.invA / a - (b / a**2) / (1 + z2 * b / a) * np.outer(self.z, self.z.T.dot(self.invA))
assert not np.isnan(self.A).any()
def stop(self, inner=False):
    """
    Stopping criteria
    Set inner to true to test only for criteria related to the inner loop
    """
    if not inner:
        # Outer-loop criteria, checked in priority order; only the first
        # one that fires is reported. All conditions are side-effect
        # free, so evaluating them eagerly is safe.
        criteria = [
            (self.sigma < self.tolsig, "sigma"),
            (self.stagnation > self.tolstagnation, "Stagnation crit"),
            (len(self.best) > 2
             and self.best[-2] - self.best[-1] < self.tolfun, "TolFun crit"),
            (self.sigma * self.p_succ < self.TolX, "TolX crit"),
        ]
        for fired, label in criteria:
            if fired:
                print(label)
                return True
    # Evaluation-budget criterion applies to both inner and outer loops.
    if self.count_f >= self.tolcountf or self.count_g > self.tolcountg:
        print("Number of evals exceeded")
        return True
    return False
def fmin_con(objective, constraint, x0, sigma0, options=True, plot=False):
    """
    Interface for constrained optimization
    """
    n_f = 0
    n_g = 0
    es = FastActiveElitistES(x0, sigma0)
    sig, vp, std, xs = [], [], [], []
    while not es.stop():
        # Resample candidates until one satisfies the constraints.
        is_feasible = False
        while not is_feasible:
            x = es.ask()
            g = constraint(x)
            n_g += 1
            is_feasible = es.test(g)
            # Recorded for later plotting.
            vp.append(np.linalg.eig(es.A.T.dot(es.A))[0])
            sig.append(es.sigma)
            std.append(np.diag(es.A.dot(es.A)))
            if n_g % 1500 == 0 and options:
                print("{0} evaluation of f and {1} of the constraint."
                      .format(n_f, n_g))
        xs.append(es.x)
        f = objective(x)
        n_f += 1
        es.tell(x, f)
    if plot:
        return es, vp, sig, std, xs
    return es
def fmin2(f, x0, sigma0):
    """
    Standard interface to unconstrained optimization with ActiveElitistES
    """
    es = FastActiveElitistES(x0, sigma0)
    # Plain ask/tell loop until any stopping criterion fires.
    while not es.stop():
        candidate = es.ask()
        value = f(candidate)
        es.tell(candidate, value)
    return es
| 32.204959
| 109
| 0.516064
| 2,713
| 19,484
| 3.638776
| 0.100258
| 0.025831
| 0.014587
| 0.013675
| 0.821515
| 0.787784
| 0.77654
| 0.759724
| 0.75
| 0.73987
| 0
| 0.025879
| 0.363375
| 19,484
| 605
| 110
| 32.204959
| 0.769994
| 0.198573
| 0
| 0.742105
| 0
| 0
| 0.033101
| 0
| 0
| 0
| 0
| 0
| 0.010526
| 1
| 0.065789
| false
| 0
| 0.002632
| 0
| 0.144737
| 0.055263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.