hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9e23a5b0d5de527a766c7127684d45f09948165b
| 10,931
|
py
|
Python
|
dabl/portfolios/portfolio_hgb.py
|
bhishanpdl/dabl
|
96693a8e5655b0aec18cacfaa334584eb8e57f53
|
[
"BSD-3-Clause"
] | 500
|
2019-04-01T13:50:18.000Z
|
2022-03-07T01:50:45.000Z
|
dabl/portfolios/portfolio_hgb.py
|
bhishanpdl/dabl
|
96693a8e5655b0aec18cacfaa334584eb8e57f53
|
[
"BSD-3-Clause"
] | 111
|
2019-04-01T17:48:40.000Z
|
2020-03-27T16:39:19.000Z
|
dabl/portfolios/portfolio_hgb.py
|
bhishanpdl/dabl
|
96693a8e5655b0aec18cacfaa334584eb8e57f53
|
[
"BSD-3-Clause"
] | 60
|
2019-04-01T14:58:35.000Z
|
2021-08-13T02:58:20.000Z
|
from sklearn.experimental import enable_hist_gradient_boosting
from sklearn.ensemble import HistGradientBoostingClassifier
enable_hist_gradient_boosting
def portfolio_hgb():
hgb = [
HistGradientBoostingClassifier(l2_regularization=1e-06,
max_bins=255,
max_iter=200,
max_leaf_nodes=128,
min_samples_leaf=50),
HistGradientBoostingClassifier(l2_regularization=1.0,
max_bins=64,
max_depth=5,
max_leaf_nodes=4),
HistGradientBoostingClassifier(l2_regularization=1.0,
max_bins=64,
max_depth=18,
max_iter=350,
max_leaf_nodes=32,
min_samples_leaf=7),
HistGradientBoostingClassifier(l2_regularization=1e-07,
max_bins=16,
max_depth=19,
max_iter=500,
max_leaf_nodes=8,
min_samples_leaf=27),
HistGradientBoostingClassifier(l2_regularization=10.0,
max_bins=255,
max_depth=16,
max_leaf_nodes=128,
min_samples_leaf=8),
HistGradientBoostingClassifier(l2_regularization=1e-07,
max_bins=255,
max_depth=16,
max_iter=350,
max_leaf_nodes=128,
min_samples_leaf=13),
HistGradientBoostingClassifier(l2_regularization=10.0,
max_bins=8,
max_depth=20,
max_iter=150,
max_leaf_nodes=4,
min_samples_leaf=13),
HistGradientBoostingClassifier(l2_regularization=1e-07,
max_bins=16,
max_depth=3,
max_iter=350,
max_leaf_nodes=16,
min_samples_leaf=14),
HistGradientBoostingClassifier(l2_regularization=0.001,
learning_rate=0.01,
max_bins=64,
max_depth=9,
max_iter=450,
max_leaf_nodes=32,
min_samples_leaf=12),
HistGradientBoostingClassifier(l2_regularization=0.0001,
max_bins=128,
max_depth=20,
max_iter=500,
max_leaf_nodes=128,
min_samples_leaf=3),
HistGradientBoostingClassifier(l2_regularization=0.01,
learning_rate=0.01,
max_bins=8,
max_depth=19,
max_iter=350,
max_leaf_nodes=4,
min_samples_leaf=3),
HistGradientBoostingClassifier(l2_regularization=0.01,
learning_rate=1.0,
max_bins=32,
max_depth=2,
max_iter=150,
max_leaf_nodes=32),
HistGradientBoostingClassifier(l2_regularization=1e-06,
max_bins=128,
max_depth=12,
max_iter=300,
max_leaf_nodes=4,
min_samples_leaf=3),
HistGradientBoostingClassifier(l2_regularization=1e-08,
max_bins=8,
max_depth=6,
max_iter=500,
max_leaf_nodes=32,
min_samples_leaf=15),
HistGradientBoostingClassifier(l2_regularization=10.0,
max_bins=255,
max_depth=20,
max_iter=400,
max_leaf_nodes=64,
min_samples_leaf=5),
HistGradientBoostingClassifier(l2_regularization=10.0,
learning_rate=1.0,
max_bins=16,
max_depth=2,
max_iter=200,
max_leaf_nodes=64,
min_samples_leaf=2),
HistGradientBoostingClassifier(l2_regularization=0.01,
max_bins=255,
max_depth=3,
max_iter=400,
max_leaf_nodes=16,
min_samples_leaf=11),
HistGradientBoostingClassifier(l2_regularization=100.0,
max_bins=255,
max_depth=11,
max_iter=400,
max_leaf_nodes=16,
min_samples_leaf=18),
HistGradientBoostingClassifier(l2_regularization=0.001,
max_bins=64,
max_depth=14,
max_iter=200,
max_leaf_nodes=16,
min_samples_leaf=3),
HistGradientBoostingClassifier(l2_regularization=0.01,
max_bins=16,
max_depth=4,
max_iter=450,
max_leaf_nodes=64,
min_samples_leaf=19),
HistGradientBoostingClassifier(l2_regularization=0.0001,
learning_rate=0.01,
max_bins=64,
max_depth=19,
max_iter=200,
max_leaf_nodes=32,
min_samples_leaf=8),
HistGradientBoostingClassifier(l2_regularization=1e-05,
max_bins=16,
max_iter=400,
max_leaf_nodes=128,
min_samples_leaf=48),
HistGradientBoostingClassifier(l2_regularization=10.0,
max_bins=16,
max_depth=4,
max_iter=50,
max_leaf_nodes=4,
min_samples_leaf=6),
HistGradientBoostingClassifier(l2_regularization=1.0,
max_bins=128,
max_depth=16,
max_leaf_nodes=8,
min_samples_leaf=5),
HistGradientBoostingClassifier(l2_regularization=0.001,
max_bins=255,
max_depth=18,
max_iter=450,
max_leaf_nodes=64,
min_samples_leaf=9),
HistGradientBoostingClassifier(l2_regularization=1.0,
max_bins=16,
max_depth=2,
max_iter=300,
max_leaf_nodes=16,
min_samples_leaf=8),
HistGradientBoostingClassifier(l2_regularization=10.0,
max_bins=255,
max_depth=15,
max_iter=200,
max_leaf_nodes=64),
HistGradientBoostingClassifier(l2_regularization=0.01,
max_bins=64,
max_depth=5,
max_iter=450,
max_leaf_nodes=32,
min_samples_leaf=15),
HistGradientBoostingClassifier(l2_regularization=1.0,
learning_rate=1.0,
max_bins=128,
max_depth=2,
max_iter=50,
max_leaf_nodes=32,
min_samples_leaf=3),
HistGradientBoostingClassifier(l2_regularization=100.0,
max_bins=64,
max_depth=7,
max_iter=350,
max_leaf_nodes=8,
min_samples_leaf=5),
HistGradientBoostingClassifier(l2_regularization=1e-05,
max_bins=255,
max_depth=16,
max_iter=400,
max_leaf_nodes=64,
min_samples_leaf=10),
HistGradientBoostingClassifier(l2_regularization=1e-10,
max_bins=64,
max_depth=2,
max_leaf_nodes=4,
min_samples_leaf=3)
]
return (hgb)
| 54.655
| 64
| 0.351477
| 721
| 10,931
| 4.972261
| 0.0957
| 0.285635
| 0.4106
| 0.131102
| 0.925244
| 0.842957
| 0.78689
| 0.46304
| 0.394979
| 0.359275
| 0
| 0.104133
| 0.603787
| 10,931
| 199
| 65
| 54.929648
| 0.72362
| 0
| 0
| 0.826531
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005102
| false
| 0
| 0.010204
| 0
| 0.020408
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f5944e98b8ec6555affcf278fa910561b23b71d1
| 162
|
py
|
Python
|
player/views.py
|
nthall/watershed
|
bd94bbab6bc581fb6d8f970997e23273f638ee14
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1
|
2020-08-29T02:44:14.000Z
|
2020-08-29T02:44:14.000Z
|
player/views.py
|
nthall/watershed
|
bd94bbab6bc581fb6d8f970997e23273f638ee14
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 5
|
2020-07-21T04:09:10.000Z
|
2020-10-08T17:59:45.000Z
|
player/views.py
|
nthall/watershed
|
bd94bbab6bc581fb6d8f970997e23273f638ee14
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
from __future__ import unicode_literals
from django.shortcuts import render_to_response
def index(request):
return render_to_response('player/index.html')
| 20.25
| 50
| 0.820988
| 22
| 162
| 5.636364
| 0.727273
| 0.129032
| 0.258065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117284
| 162
| 7
| 51
| 23.142857
| 0.867133
| 0
| 0
| 0
| 0
| 0
| 0.104938
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
19250220adbc070db9596e9ce67a1c3a2014ca47
| 11,259
|
py
|
Python
|
guiunits/pon56gDemoNNTrainingOutput.py
|
zdx198811/lab604-automation
|
f73acdce38422d9c3a0845a553efa4eebc662086
|
[
"MIT"
] | 1
|
2020-10-30T15:22:20.000Z
|
2020-10-30T15:22:20.000Z
|
guiunits/pon56gDemoNNTrainingOutput.py
|
zdx198811/lab604-automation
|
f73acdce38422d9c3a0845a553efa4eebc662086
|
[
"MIT"
] | null | null | null |
guiunits/pon56gDemoNNTrainingOutput.py
|
zdx198811/lab604-automation
|
f73acdce38422d9c3a0845a553efa4eebc662086
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 12 09:51:15 2019
@author: dongxucz
"""
training_console_output = \
'INFO: epoch 1-1, loss: 0.094 (vt899pondemo)\n\
INFO: epoch 1-300, loss: 0.061 (vt899pondemo)\n\
INFO: epoch 1-599, loss: 0.015 (vt899pondemo)\n\
INFO: epoch 1-898, loss: 0.014 (vt899pondemo)\n\
INFO: epoch 1-1197, loss: 0.014 (vt899pondemo)\n\
INFO: epoch 1-1496, loss: 0.013 (vt899pondemo)\n\
INFO: epoch 1-1795, loss: 0.013 (vt899pondemo)\n\
INFO: epoch 1-2094, loss: 0.013 (vt899pondemo)\n\
INFO: epoch 1-2393, loss: 0.013 (vt899pondemo)\n\
INFO: epoch 1-2692, loss: 0.013 (vt899pondemo)\n\
INFO: epoch 1-2991, loss: 0.013 (vt899pondemo)\n\
INFO: epoch 1-3290, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-3589, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-3888, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-4187, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-4486, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-4785, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-5084, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-5383, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-5682, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-5981, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-6280, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-6579, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-6878, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-7177, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-7476, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-7775, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-8074, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-8373, loss: 0.012 (vt899pondemo)\n\
INFO: epoch 1-8672, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-8971, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-9270, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-9569, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-9868, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-10167, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-10466, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-10765, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-11064, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-11363, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-11662, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-11961, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-12260, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-12559, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-12858, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-13157, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-13456, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-13755, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-14054, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-14353, loss: 0.011 (vt899pondemo)\n\
INFO: epoch 1-14652, loss: 0.010 (vt899pondemo)\n\
INFO: epoch 1-14951, loss: 0.010 (vt899pondemo)\n\
INFO: epoch 1-15250, loss: 0.010 (vt899pondemo)\n\
INFO: 7772\n\
input=tensor([[-0.2107, -0.0406, -0.2831, -0.4894, -0.2871, -0.0609, -0.0105, 0.1210, 0.2017, 0.0715, 0.1082, 0.0649, -0.0845, -0.0027, -0.0310, 0.0000]], dtype=torch.float64)\n\
label=0.0\n\
output=0.50046411456858771.0\n\
INFO: 9053\n\
input=tensor([[-0.1779, 0.2140, 0.1716, -0.3705, -0.4830, -0.1552, 0.2418, 0.4829, 0.5325, 0.3768, 0.0569, -0.1252, -0.2003, -0.2202, 0.1606, 0.0000]], dtype=torch.float64)\n\
label=0.0\n\
output=-0.5087356085560115-1.0\n\
INFO: Accuracy on 9986 test data: 99.9800 %\n\
INFO: epoch 2-1, loss: 0.010 (vt899pondemo)\n\
INFO: epoch 2-300, loss: 0.010 (vt899pondemo)\n\
INFO: epoch 2-599, loss: 0.010 (vt899pondemo)\n\
INFO: epoch 2-898, loss: 0.010 (vt899pondemo)\n\
INFO: epoch 2-1197, loss: 0.010 (vt899pondemo)\n\
INFO: epoch 2-1496, loss: 0.010 (vt899pondemo)\n\
INFO: epoch 2-1795, loss: 0.009 (vt899pondemo)\n\
INFO: epoch 2-2094, loss: 0.009 (vt899pondemo)\n\
INFO: epoch 2-2393, loss: 0.009 (vt899pondemo)\n\
INFO: epoch 2-2692, loss: 0.009 (vt899pondemo)\n\
INFO: epoch 2-2991, loss: 0.009 (vt899pondemo)\n\
INFO: epoch 2-3290, loss: 0.008 (vt899pondemo)\n\
INFO: epoch 2-3589, loss: 0.008 (vt899pondemo)\n\
INFO: epoch 2-3888, loss: 0.008 (vt899pondemo)\n\
INFO: epoch 2-4187, loss: 0.008 (vt899pondemo)\n\
INFO: epoch 2-4486, loss: 0.008 (vt899pondemo)\n\
INFO: epoch 2-4785, loss: 0.008 (vt899pondemo)\n\
INFO: epoch 2-5084, loss: 0.008 (vt899pondemo)\n\
INFO: epoch 2-5383, loss: 0.007 (vt899pondemo)\n\
INFO: epoch 2-5682, loss: 0.007 (vt899pondemo)\n\
INFO: epoch 2-5981, loss: 0.007 (vt899pondemo)\n\
INFO: epoch 2-6280, loss: 0.007 (vt899pondemo)\n\
INFO: epoch 2-6579, loss: 0.007 (vt899pondemo)\n\
INFO: epoch 2-6878, loss: 0.007 (vt899pondemo)\n\
INFO: epoch 2-7177, loss: 0.007 (vt899pondemo)\n\
INFO: epoch 2-7476, loss: 0.007 (vt899pondemo)\n\
INFO: epoch 2-7775, loss: 0.007 (vt899pondemo)\n\
INFO: epoch 2-8074, loss: 0.006 (vt899pondemo)\n\
INFO: epoch 2-8373, loss: 0.006 (vt899pondemo)\n\
INFO: epoch 2-8672, loss: 0.006 (vt899pondemo)\n\
INFO: epoch 2-8971, loss: 0.006 (vt899pondemo)\n\
INFO: epoch 2-9270, loss: 0.006 (vt899pondemo)\n\
INFO: epoch 2-9569, loss: 0.006 (vt899pondemo)\n\
INFO: epoch 2-9868, loss: 0.006 (vt899pondemo)\n\
INFO: epoch 2-10167, loss: 0.006 (vt899pondemo)\n\
INFO: epoch 2-10466, loss: 0.006 (vt899pondemo)\n\
INFO: epoch 2-10765, loss: 0.006 (vt899pondemo)\n\
INFO: epoch 2-11064, loss: 0.006 (vt899pondemo)\n\
INFO: epoch 2-11363, loss: 0.006 (vt899pondemo)\n\
INFO: epoch 2-11662, loss: 0.005 (vt899pondemo)\n\
INFO: epoch 2-11961, loss: 0.005 (vt899pondemo)\n\
INFO: epoch 2-12260, loss: 0.005 (vt899pondemo)\n\
INFO: epoch 2-12559, loss: 0.005 (vt899pondemo)\n\
INFO: epoch 2-12858, loss: 0.005 (vt899pondemo)\n\
INFO: epoch 2-13157, loss: 0.005 (vt899pondemo)\n\
INFO: epoch 2-13456, loss: 0.005 (vt899pondemo)\n\
INFO: epoch 2-13755, loss: 0.005 (vt899pondemo)\n\
INFO: epoch 2-14054, loss: 0.004 (vt899pondemo)\n\
INFO: epoch 2-14353, loss: 0.004 (vt899pondemo)\n\
INFO: epoch 2-14652, loss: 0.004 (vt899pondemo)\n\
INFO: epoch 2-14951, loss: 0.004 (vt899pondemo)\n\
INFO: epoch 2-15250, loss: 0.004 (vt899pondemo)\n\
INFO: Accuracy on 9986 test data: 100.0000 %\n\
INFO: epoch 3-1, loss: 0.004 (vt899pondemo)\n\
INFO: epoch 3-300, loss: 0.004 (vt899pondemo)\n\
INFO: epoch 3-599, loss: 0.004 (vt899pondemo)\n\
INFO: epoch 3-898, loss: 0.004 (vt899pondemo)\n\
INFO: epoch 3-1197, loss: 0.004 (vt899pondemo)\n\
INFO: epoch 3-1496, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-1795, loss: 0.004 (vt899pondemo)\n\
INFO: epoch 3-2094, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-2393, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-2692, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-2991, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-3290, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-3589, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-3888, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-4187, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-4486, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-4785, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-5084, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-5383, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-5682, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-5981, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-6280, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-6579, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-6878, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-7177, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-7476, loss: 0.003 (vt899pondemo)\n\
INFO: epoch 3-7775, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-8074, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-8373, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-8672, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-8971, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-9270, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-9569, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-9868, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-10167, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-10466, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-10765, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-11064, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-11363, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-11662, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-11961, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-12260, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-12559, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-12858, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-13157, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-13456, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-13755, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-14054, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-14353, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-14652, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-14951, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 3-15250, loss: 0.002 (vt899pondemo)\n\
INFO: Accuracy on 9986 test data: 100.0000 %\n\
INFO: epoch 4-1, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-300, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-599, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-898, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-1197, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-1496, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-1795, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-2094, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-2393, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-2692, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-2991, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-3290, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-3589, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-3888, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-4187, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-4486, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-4785, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-5084, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-5383, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-5682, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-5981, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-6280, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-6579, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-6878, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-7177, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-7476, loss: 0.002 (vt899pondemo)\n\
INFO: epoch 4-7775, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-8074, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-8373, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-8672, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-8971, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-9270, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-9569, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-9868, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-10167, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-10466, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-10765, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-11064, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-11363, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-11662, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-11961, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-12260, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-12559, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-12858, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-13157, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-13456, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-13755, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-14054, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-14353, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-14652, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-14951, loss: 0.001 (vt899pondemo)\n\
INFO: epoch 4-15250, loss: 0.001 (vt899pondemo)\n\
INFO: Accuracy on 9986 test data: 100.0000 %'.split('\n')
| 49.381579
| 184
| 0.696865
| 2,027
| 11,259
| 3.869758
| 0.071041
| 0.135773
| 0.45079
| 0.572157
| 0.843702
| 0.834906
| 0.828659
| 0.825217
| 0.029832
| 0.029832
| 0
| 0.2843
| 0.120881
| 11,259
| 228
| 185
| 49.381579
| 0.508183
| 0.006839
| 0
| 0.0181
| 0
| 0.00905
| 0.000626
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
19799a97e8e57b145cfd644af19a6802a03abf79
| 144
|
py
|
Python
|
How_module_works/module/__init__.py
|
456Product/GUI_module_tutorial
|
17ed26a4dd092bb274a70291a6bc78a5f4014641
|
[
"MIT"
] | null | null | null |
How_module_works/module/__init__.py
|
456Product/GUI_module_tutorial
|
17ed26a4dd092bb274a70291a6bc78a5f4014641
|
[
"MIT"
] | null | null | null |
How_module_works/module/__init__.py
|
456Product/GUI_module_tutorial
|
17ed26a4dd092bb274a70291a6bc78a5f4014641
|
[
"MIT"
] | null | null | null |
print(f"====== start : {__name__} ======")
print("\nfrom . import test_file")
from . import test_file
print(f"====== e n d : {__name__} ======")
| 36
| 42
| 0.555556
| 19
| 144
| 3.684211
| 0.631579
| 0.171429
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131944
| 144
| 4
| 43
| 36
| 0.56
| 0
| 0
| 0
| 0
| 0
| 0.613793
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.75
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
197ae1ba2759bc828cedd3a321f588c42f0c7232
| 135
|
py
|
Python
|
tests/parser/bug.16.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/bug.16.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/bug.16.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
Teacher :- Class.
key_found :- Teacher, Class.
"""
output = """
Teacher :- Class.
key_found :- Teacher, Class.
"""
| 12.272727
| 29
| 0.57037
| 14
| 135
| 5.357143
| 0.428571
| 0.64
| 0.4
| 0.533333
| 0.853333
| 0.853333
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 135
| 10
| 30
| 13.5
| 0.714286
| 0
| 0
| 0.75
| 0
| 0
| 0.75969
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
199aef70d3e07e6a22fb81ea4782ab5ca1b1ae19
| 8,086
|
py
|
Python
|
ratings/migrations/0012_auto_20211025_1536.py
|
FSTUM/rallyetool-v2
|
2f3e2b5cb8655abe023ed1215b7182430b75bb23
|
[
"MIT"
] | 1
|
2021-10-30T09:31:02.000Z
|
2021-10-30T09:31:02.000Z
|
ratings/migrations/0012_auto_20211025_1536.py
|
FSTUM/rallyetool-v2
|
2f3e2b5cb8655abe023ed1215b7182430b75bb23
|
[
"MIT"
] | 9
|
2021-11-23T10:13:43.000Z
|
2022-03-01T15:04:15.000Z
|
ratings/migrations/0012_auto_20211025_1536.py
|
CommanderStorm/rallyetool-v2
|
721413d6df8afc9347dac7ee83deb3a0ad4c01bc
|
[
"MIT"
] | 1
|
2021-10-16T09:07:47.000Z
|
2021-10-16T09:07:47.000Z
|
# Generated by Django 3.2.7 on 2021-10-25 13:36
import django.core.validators
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("ratings", "0011_auto_20211023_1743"),
]
operations = [
migrations.CreateModel(
name="RatingScheme3",
fields=[
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
],
options={
"abstract": False,
},
),
migrations.AddField(
model_name="rating",
name="handicap",
field=models.PositiveIntegerField(
blank=True,
help_text="Needed for the RatingScheme 3",
null=True,
verbose_name="Handicap used for grading. (i.e. group-size)",
),
),
migrations.AddField(
model_name="rating",
name="value",
field=models.PositiveIntegerField(
blank=True,
help_text="Needed for the RatingScheme 2 or 3",
null=True,
verbose_name="Value achieved by the group",
),
),
migrations.AddField(
model_name="station",
name="rating_scheme_choices",
field=models.PositiveSmallIntegerField(
choices=[
(1, "1 - Rating of the groups by the tutors is final. No scheme."),
(2, "2 - Rating of the groups by the tutors is based on a single key."),
(3, "3 - Rating of the groups by the tutors is based on multiple keys (one for each handicap)."),
],
default=1,
verbose_name="Rating Scheme",
),
),
migrations.AlterField(
model_name="rating",
name="points",
field=models.IntegerField(
default=0,
validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(10)],
),
),
migrations.CreateModel(
name="RatingScheme3Group",
fields=[
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"mark_for_10p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 10 points"),
),
(
"mark_for_9p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 9 points"),
),
(
"mark_for_8p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 8 points"),
),
(
"mark_for_7p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 7 points"),
),
(
"mark_for_6p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 6 points"),
),
(
"mark_for_5p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 5 points"),
),
(
"mark_for_4p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 4 points"),
),
(
"mark_for_3p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 3 points"),
),
(
"mark_for_2p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 2 points"),
),
(
"mark_for_1p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 1 point"),
),
("handicap", models.PositiveIntegerField(verbose_name="Handicap used for grading. (i.e. group-size)")),
(
"rating_scheme",
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="ratings.ratingscheme3"),
),
],
options={
"abstract": False,
},
),
migrations.AddField(
model_name="ratingscheme3",
name="station",
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="ratings.station"),
),
migrations.CreateModel(
name="RatingScheme2",
fields=[
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"mark_for_10p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 10 points"),
),
(
"mark_for_9p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 9 points"),
),
(
"mark_for_8p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 8 points"),
),
(
"mark_for_7p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 7 points"),
),
(
"mark_for_6p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 6 points"),
),
(
"mark_for_5p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 5 points"),
),
(
"mark_for_4p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 4 points"),
),
(
"mark_for_3p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 3 points"),
),
(
"mark_for_2p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 2 points"),
),
(
"mark_for_1p",
models.PositiveIntegerField(blank=True, null=True, verbose_name="Min-value for 1 point"),
),
("station", models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="ratings.station")),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="RatingScheme1",
fields=[
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
("station", models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="ratings.station")),
],
options={
"abstract": False,
},
),
]
| 41.896373
| 119
| 0.50136
| 728
| 8,086
| 5.413462
| 0.168956
| 0.078153
| 0.173053
| 0.195382
| 0.782543
| 0.775184
| 0.763258
| 0.739406
| 0.731794
| 0.720122
| 0
| 0.019262
| 0.390057
| 8,086
| 192
| 120
| 42.114583
| 0.779805
| 0.005565
| 0
| 0.655914
| 1
| 0
| 0.182361
| 0.008086
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016129
| 0
| 0.032258
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5fedb89cf970172e5f09b226855ea8e955d42e20
| 6,220
|
py
|
Python
|
unifier/apps/core/migrations/0001_initial.py
|
sosolidkk/manga-unifier
|
4cca148affbb197b9284d46ef04c66d42d96c03a
|
[
"MIT"
] | 6
|
2021-03-25T14:55:36.000Z
|
2021-05-25T15:12:41.000Z
|
unifier/apps/core/migrations/0001_initial.py
|
sosolidkk/manga-unifier
|
4cca148affbb197b9284d46ef04c66d42d96c03a
|
[
"MIT"
] | 6
|
2021-02-19T12:32:26.000Z
|
2021-03-25T16:54:40.000Z
|
unifier/apps/core/migrations/0001_initial.py
|
sosolidkk/manga-unifier
|
4cca148affbb197b9284d46ef04c66d42d96c03a
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.6 on 2021-02-13 17:10
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    """Initial schema: manga/novel catalogues, their chapters, platforms and chapter images."""

    initial = True

    dependencies = []

    operations = [
        # Catalogue entry for a manga series.
        migrations.CreateModel(
            name="Manga",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("title", models.CharField(max_length=256, verbose_name="Manga title")),
                ("year", models.PositiveIntegerField(verbose_name="Launch year")),
                ("chapters_count", models.PositiveIntegerField(default=0, verbose_name="Chapters count")),
                ("author", models.CharField(blank=True, max_length=128, null=True, verbose_name="Author name")),
                ("description", models.CharField(blank=True, max_length=128, null=True, verbose_name="Manga description")),
                ("rate", models.DecimalField(blank=True, decimal_places=2, max_digits=2, null=True, verbose_name="Manga rate")),
                ("cover", models.URLField(blank=True, max_length=128, null=True, verbose_name="Cover URL")),
            ],
            options={"abstract": False},
        ),
        # Catalogue entry for a novel series.  NOTE(review): unlike Manga,
        # "rate" here is non-nullable — confirm the asymmetry is intended.
        migrations.CreateModel(
            name="Novel",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("title", models.CharField(max_length=256, verbose_name="Novel title")),
                ("year", models.PositiveIntegerField(verbose_name="Launch year")),
                ("chapters_count", models.PositiveIntegerField(default=0, verbose_name="Chapters count")),
                ("author", models.CharField(blank=True, max_length=128, null=True, verbose_name="Author name")),
                ("description", models.CharField(blank=True, max_length=128, null=True, verbose_name="Novel description")),
                ("rate", models.DecimalField(decimal_places=2, max_digits=2, verbose_name="Novel rate")),
                ("cover", models.URLField(blank=True, max_length=128, null=True, verbose_name="Cover URL")),
            ],
            options={"abstract": False},
        ),
        # Source site hosting mangas and novels.
        migrations.CreateModel(
            name="Platform",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("url", models.URLField(max_length=128, verbose_name="Platform URL")),
                ("name", models.CharField(max_length=128, verbose_name="Platform Name")),
                ("url_search", models.URLField(max_length=128, verbose_name="Platform search URL")),
                ("mangas", models.ManyToManyField(related_name="platform", to="core.Manga")),
                ("novels", models.ManyToManyField(related_name="platform", to="core.Novel")),
            ],
            options={"abstract": False},
        ),
        # A single chapter of a novel, with its text body.
        migrations.CreateModel(
            name="NovelChapter",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("number", models.PositiveIntegerField(verbose_name="Chapter number")),
                ("title", models.CharField(max_length=256, verbose_name="Chapter title")),
                ("language", models.PositiveSmallIntegerField(choices=[(0, "English Us"), (1, "Portuguese Br")], default=1, verbose_name="Chapter language")),
                ("body", models.TextField()),
                ("novel", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name="novel_chapters", to="core.novel")),
            ],
            options={"abstract": False},
        ),
        # A single chapter of a manga; its pages live in Image rows.
        migrations.CreateModel(
            name="MangaChapter",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("number", models.PositiveIntegerField(verbose_name="Chapter number")),
                ("title", models.CharField(max_length=256, verbose_name="Chapter title")),
                ("language", models.PositiveSmallIntegerField(choices=[(0, "English Us"), (1, "Portuguese Br")], default=1, verbose_name="Chapter language")),
                ("manga", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name="manga_chapters", to="core.manga")),
            ],
            options={"abstract": False},
        ),
        # One page image belonging to a manga chapter.
        migrations.CreateModel(
            name="Image",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("path", models.URLField(max_length=128, verbose_name="Image path URL")),
                ("url", models.URLField(max_length=128, verbose_name="Image URL")),
                ("manga_chapter", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name="images", to="core.mangachapter")),
            ],
            options={"abstract": False},
        ),
    ]
| 54.561404
| 158
| 0.590997
| 626
| 6,220
| 5.71246
| 0.162939
| 0.076902
| 0.07047
| 0.083893
| 0.847036
| 0.838926
| 0.804251
| 0.780201
| 0.729866
| 0.698546
| 0
| 0.017075
| 0.265595
| 6,220
| 113
| 159
| 55.044248
| 0.765762
| 0.007235
| 0
| 0.641509
| 1
| 0
| 0.145472
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028302
| 0
| 0.066038
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
27380cb55e47ef36de34e86c81b34693a96c2951
| 4,378
|
py
|
Python
|
cloudmarker/test/test_splunkhecstore.py
|
dkuspawono/cloudmarker
|
4d09b4666b3e2178b51b4e16688537b288e23f26
|
[
"MIT"
] | 208
|
2019-04-10T05:15:11.000Z
|
2022-03-16T17:41:29.000Z
|
cloudmarker/test/test_splunkhecstore.py
|
dkuspawono/cloudmarker
|
4d09b4666b3e2178b51b4e16688537b288e23f26
|
[
"MIT"
] | 88
|
2018-12-17T18:24:13.000Z
|
2021-05-15T04:19:53.000Z
|
cloudmarker/test/test_splunkhecstore.py
|
dkuspawono/cloudmarker
|
4d09b4666b3e2178b51b4e16688537b288e23f26
|
[
"MIT"
] | 15
|
2019-01-03T04:18:33.000Z
|
2021-06-03T09:24:31.000Z
|
"""Tests for SplunkHECToken plugin."""
import unittest
from unittest import mock
import requests
from cloudmarker.stores import splunkhecstore
class SplunkHECStoreTest(unittest.TestCase):
    """Tests for SplunkHECStore plugin."""

    # Call signature every HEC post made by the store is expected to match.
    _POST_CALL = mock.call(mock.ANY, headers=mock.ANY,
                           data=mock.ANY, verify=mock.ANY)

    @mock.patch('requests.session')
    def test_post_called_once(self, mock_session):
        response = mock.Mock()
        response.status_code = 200
        response.json.return_value = {'code': 0}
        mock_session().post.return_value = response
        # Default buffer length (1000): the single buffered record is only
        # flushed when done() is called.
        store = splunkhecstore.SplunkHECStore('', '', '', '')
        store.write({'record_type': 'firewall_rule'})
        store.done()
        mock_session().post.assert_called_once_with(
            mock.ANY, headers=mock.ANY, data=mock.ANY, verify=mock.ANY)

    @mock.patch('requests.session')
    def test_happy_flow(self, mock_session):
        response = mock.MagicMock()
        response.status_code = 200
        response.json.return_value = {'code': 0}
        mock_session().post.return_value = response
        # Buffer length 0: the record is posted immediately.
        store = splunkhecstore.SplunkHECStore('', '', '', '', 0)
        store.write({'record_type': 'firewall_rule'})
        store.done()
        mock_session().post.assert_called_once_with(
            mock.ANY, headers=mock.ANY, data=mock.ANY, verify=mock.ANY)

    @mock.patch('requests.session')
    def test_post_failure_no_data_loss(self, mock_session):
        # HTTP 500 from Splunk: the store must retry the same payload.
        response = mock.MagicMock()
        response.status_code = 500
        response.json.return_value = {'code': 0}
        mock_session().post.return_value = response
        store = splunkhecstore.SplunkHECStore('', '', '', '', 0)
        store.write({'record_type': 'firewall_rule'})
        store.done()
        posts = mock_session().post.mock_calls.count(self._POST_CALL)
        self.assertEqual(posts, 2)

    @mock.patch('requests.session')
    def test_post_fail_splunk_unable_to_index(self, mock_session):
        # HTTP 200 but a non-zero Splunk status code: also retried.
        response = mock.MagicMock()
        response.status_code = 200
        response.json.return_value = {'code': 1, 'text': 'foo'}
        mock_session().post.return_value = response
        store = splunkhecstore.SplunkHECStore('', '', '', '', 0)
        store.write({'record_type': 'firewall_rule'})
        store.done()
        posts = mock_session().post.mock_calls.count(self._POST_CALL)
        self.assertEqual(posts, 2)

    @mock.patch('requests.session')
    def test_post_fail_splunk_response_non_json(self, mock_session):
        # Unparseable response body is treated as a failure and retried.
        response = mock.MagicMock()
        response.status_code = 200
        response.json.side_effect = Exception()
        mock_session().post.return_value = response
        store = splunkhecstore.SplunkHECStore('', '', '', '', 0)
        store.write({'record_type': 'firewall_rule'})
        store.done()
        self.assertEqual(len(response.json.mock_calls), 2)

    @mock.patch('requests.session')
    def test_post_fail_raise_connection_error(self, mock_session):
        # Network-level failure is treated the same way: one retry.
        mock_session().post.side_effect = requests.ConnectionError()
        store = splunkhecstore.SplunkHECStore('', '', '', '', 0)
        store.write({'record_type': 'firewall_rule'})
        store.done()
        self.assertEqual(len(mock_session().post.mock_calls), 2)
| 37.741379
| 71
| 0.621745
| 481
| 4,378
| 5.340956
| 0.168399
| 0.098093
| 0.064227
| 0.056053
| 0.821331
| 0.813546
| 0.813546
| 0.798365
| 0.798365
| 0.782795
| 0
| 0.010003
| 0.269301
| 4,378
| 115
| 72
| 38.069565
| 0.79306
| 0.02878
| 0
| 0.768293
| 0
| 0
| 0.062014
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 1
| 0.073171
| false
| 0
| 0.04878
| 0
| 0.134146
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
277704423932a6da81d6a1c5bc3d855369138807
| 138
|
py
|
Python
|
rastervision/v2/rv/data/label/__init__.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | 1
|
2019-11-07T10:02:23.000Z
|
2019-11-07T10:02:23.000Z
|
rastervision/v2/rv/data/label/__init__.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | null | null | null |
rastervision/v2/rv/data/label/__init__.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | null | null | null |
# flake8: noqa
from rastervision.v2.rv.data.label.labels import *
from rastervision.v2.rv.data.label.chip_classification_labels import *
| 27.6
| 70
| 0.811594
| 20
| 138
| 5.5
| 0.6
| 0.290909
| 0.327273
| 0.363636
| 0.527273
| 0.527273
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0.086957
| 138
| 4
| 71
| 34.5
| 0.849206
| 0.086957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
27b8c97a83e79314dcc547ec4668fffb38d9b5d1
| 6,312
|
py
|
Python
|
migrations/d9530a529b3f_add_timezone_awareness_for_datetime.py
|
MansoorHanif/FYP-web-app
|
918008d3b5eedaa904f3e720296afde9d73ac3f4
|
[
"BSD-3-Clause"
] | null | null | null |
migrations/d9530a529b3f_add_timezone_awareness_for_datetime.py
|
MansoorHanif/FYP-web-app
|
918008d3b5eedaa904f3e720296afde9d73ac3f4
|
[
"BSD-3-Clause"
] | 1
|
2022-01-13T03:10:29.000Z
|
2022-01-13T03:10:29.000Z
|
migrations/d9530a529b3f_add_timezone_awareness_for_datetime.py
|
MansoorHanif/FYP-web-app
|
918008d3b5eedaa904f3e720296afde9d73ac3f4
|
[
"BSD-3-Clause"
] | null | null | null |
"""add timezone awareness for datetime objects
Revision ID: d9530a529b3f
Revises: 221d918aa9f0
Create Date: 2016-06-21 09:39:38.348519
"""
# revision identifiers, used by Alembic.
revision = 'd9530a529b3f'
down_revision = '221d918aa9f0'
from alembic import op
import sqlalchemy as sa
import flaskbb
def upgrade():
    """Widen naive DateTime columns to timezone-aware UTCDateTime columns.

    SQLite stores DateTime values as plain strings anyway, and its limited
    ALTER TABLE support makes column alteration painful, so the migration
    is skipped entirely for that dialect.
    """
    connection = op.get_bind()
    if connection.engine.dialect.name == "sqlite":
        return
    # (table, column) pairs, grouped by the model module they come from and
    # kept in the same order as the original hand-written statements.
    # Note: sa.DateTime() and sa.DateTime(timezone=False) are equivalent
    # (timezone defaults to False), so a single spelling is used throughout.
    columns = [
        # user/models.py
        ('users', 'date_joined'),
        ('users', 'lastseen'),
        ('users', 'birthday'),
        ('users', 'last_failed_login'),
        # message/models.py
        ('conversations', 'date_created'),
        ('messages', 'date_created'),
        # forum/models.py
        ('topicsread', 'last_read'),
        ('forumsread', 'last_read'),
        ('forumsread', 'cleared'),
        ('reports', 'reported'),
        ('reports', 'zapped'),
        ('posts', 'date_created'),
        ('posts', 'date_modified'),
        ('topics', 'date_created'),
        ('topics', 'last_updated'),
        ('forums', 'last_post_created'),
    ]
    for table, column in columns:
        op.alter_column(
            table,
            column,
            existing_type=sa.DateTime(timezone=False),
            type_=flaskbb.utils.database.UTCDateTime(timezone=True),
            existing_nullable=True,
        )
def downgrade():
    """Revert timezone-aware UTCDateTime columns back to naive DateTime.

    Mirrors upgrade(): skipped on SQLite, where DateTime values are stored
    as plain strings and no column alteration is required.
    """
    connection = op.get_bind()
    if connection.engine.dialect.name == "sqlite":
        return
    # (table, column) pairs, grouped by the model module they come from and
    # kept in the same order as the original hand-written statements.
    # Note: sa.DateTime() and sa.DateTime(timezone=False) are equivalent
    # (timezone defaults to False), so a single spelling is used throughout.
    columns = [
        # user/models.py
        ('users', 'date_joined'),
        ('users', 'lastseen'),
        ('users', 'birthday'),
        ('users', 'last_failed_login'),
        # message/models.py
        ('conversations', 'date_created'),
        ('messages', 'date_created'),
        # forum/models.py
        ('topicsread', 'last_read'),
        ('forumsread', 'last_read'),
        ('forumsread', 'cleared'),
        ('reports', 'reported'),
        ('reports', 'zapped'),
        ('posts', 'date_created'),
        ('posts', 'date_modified'),
        ('topics', 'date_created'),
        ('topics', 'last_updated'),
        ('forums', 'last_post_created'),
    ]
    for table, column in columns:
        op.alter_column(
            table,
            column,
            type_=sa.DateTime(timezone=False),
            existing_type=flaskbb.utils.database.UTCDateTime(timezone=True),
            existing_nullable=True,
        )
| 87.666667
| 180
| 0.772338
| 794
| 6,312
| 5.942065
| 0.127204
| 0.047478
| 0.088173
| 0.162781
| 0.908224
| 0.908224
| 0.908224
| 0.908224
| 0.908224
| 0.908224
| 0
| 0.009102
| 0.094899
| 6,312
| 71
| 181
| 88.901408
| 0.816734
| 0.069233
| 0
| 0.093023
| 0
| 0
| 0.103772
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046512
| false
| 0
| 0.069767
| 0
| 0.116279
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
27ef481b9153588616da4e270ec768d029963574
| 3,913
|
py
|
Python
|
ensemble_uncertainties/neural_estimators/architectures.py
|
ThomasDutschmann/ensemble_uncertainties
|
3babce02fb9b4c2710e32aeaaf3541ef627e534f
|
[
"MIT"
] | null | null | null |
ensemble_uncertainties/neural_estimators/architectures.py
|
ThomasDutschmann/ensemble_uncertainties
|
3babce02fb9b4c2710e32aeaaf3541ef627e534f
|
[
"MIT"
] | null | null | null |
ensemble_uncertainties/neural_estimators/architectures.py
|
ThomasDutschmann/ensemble_uncertainties
|
3babce02fb9b4c2710e32aeaaf3541ef627e534f
|
[
"MIT"
] | null | null | null |
"""Network architectures for our feed forward neural estimators."""
from tensorflow.keras.layers import Input, Dense, Dropout
from tensorflow.keras.models import Model, Sequential
def deep_architecture(output_activation):
    """Our default deep architecture for property prediction:

    in | 256 ReLU | 128 ReLU | 16 ReLU | 1

    Parameters
    ----------
    output_activation : str
        Name of the activation in the final neuron (tf.keras.activations)

    Returns
    -------
    function
        A function that takes the number of variables
        (int) and returns the (uncompiled) model
    """
    def make_model(n_vars):
        model = Sequential()
        # The first hidden layer also fixes the input dimensionality.
        model.add(Dense(256, input_shape=(n_vars,), activation='relu'))
        for width in (128, 16):
            model.add(Dense(width, activation='relu'))
        model.add(Dense(1, activation=output_activation))
        return model
    return make_model
def deep_architecture_dropout(dropout_rate=0.2):
    """Our default deep architecture for regression, with dropout:

    in | 256 ReLU | Dropout | 128 ReLU | Dropout | 16 ReLU | Dropout | 1

    Parameters
    ----------
    dropout_rate : float in [0, 1]
        Fraction of dropped out weights, default: 0.2

    Returns
    -------
    function
        A function that takes the number of variables
        (int) and returns the (uncompiled) model
    """
    def make_model(n_vars):
        # Built with the functional API (mirrors the MC-dropout variant).
        inp = Input(shape=(n_vars,))
        x = Dropout(dropout_rate)(inp)
        for width in (256, 128, 16):
            x = Dense(width, activation='relu')(x)
            x = Dropout(dropout_rate)(x)
        out = Dense(1, activation='linear')(x)
        return Model(inp, out)
    return make_model
def deep_architecture_mc_dropout(dropout_rate=0.2):
    """Our default deep architecture for regression, with dropout and
    the setting 'training=True' to perform MC dropout during inference.

    in | 256 ReLU | Dropout | 128 ReLU | Dropout | 16 ReLU | Dropout | 1

    Parameters
    ----------
    dropout_rate : float in [0, 1]
        Fraction of dropped out weights, default: 0.2

    Returns
    -------
    function
        A function that takes the number of variables
        (int) and returns the (uncompiled) model
    """
    def make_model(n_vars):
        # Built with the functional API so dropout can be invoked with
        # training=True (Sequential offers no per-call arguments).
        inp = Input(shape=(n_vars,))
        # NOTE(review): this input-level dropout is NOT called with
        # training=True, so unlike the hidden-layer dropouts below it is
        # inactive at inference time — confirm this asymmetry is intended.
        x = Dropout(dropout_rate)(inp)
        for width in (256, 128, 16):
            x = Dense(width, activation='relu')(x)
            x = Dropout(dropout_rate)(x, training=True)
        out = Dense(1, activation='linear')(x)
        return Model(inp, out)
    return make_model
def shallow_architecture(output_activation):
    """Our default shallow architecture for property prediction:

    in | 128 ReLU | 1

    Parameters
    ----------
    output_activation : str
        Name of the activation in the final neuron (tf.keras.activations)

    Returns
    -------
    function
        A function that takes the number of variables
        (int) and returns the (uncompiled) model
    """
    def make_model(n_vars):
        layers = [
            Dense(128, input_shape=(n_vars,), activation='relu'),
            Dense(1, activation=output_activation),
        ]
        return Sequential(layers)
    return make_model
| 31.813008
| 73
| 0.615896
| 477
| 3,913
| 4.962264
| 0.18239
| 0.055767
| 0.076046
| 0.064216
| 0.891001
| 0.816223
| 0.801436
| 0.801436
| 0.801436
| 0.777355
| 0
| 0.026139
| 0.276514
| 3,913
| 122
| 74
| 32.07377
| 0.809961
| 0.44186
| 0
| 0.782609
| 0
| 0
| 0.026832
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173913
| false
| 0
| 0.043478
| 0
| 0.391304
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7e0f51b8297d3c2cc1fd61106b39c04a7713f4a6
| 191
|
py
|
Python
|
examples/fastspeech/__init__.py
|
Yanshang1991/TensorFlowTTS
|
4693e39f1ac337b9d9d1991b4cdacdd0dc18917e
|
[
"Apache-2.0"
] | null | null | null |
examples/fastspeech/__init__.py
|
Yanshang1991/TensorFlowTTS
|
4693e39f1ac337b9d9d1991b4cdacdd0dc18917e
|
[
"Apache-2.0"
] | null | null | null |
examples/fastspeech/__init__.py
|
Yanshang1991/TensorFlowTTS
|
4693e39f1ac337b9d9d1991b4cdacdd0dc18917e
|
[
"Apache-2.0"
] | null | null | null |
from examples.fastspeech.fastspeech_dataset import CharactorDurationMelDataset
import examples.fastspeech.decode_fastspeech
from examples.fastspeech.fastspeech_dataset import CharactorDataset
| 63.666667
| 78
| 0.921466
| 19
| 191
| 9.105263
| 0.421053
| 0.312139
| 0.254335
| 0.369942
| 0.520231
| 0.520231
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04712
| 191
| 3
| 79
| 63.666667
| 0.950549
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
fdfc0e99ce88268ed6df75bf20db073f9b935ac5
| 39,747
|
py
|
Python
|
poker/test/test_env.py
|
MorGriffiths/PokerAI
|
a68400f4918f10dde82574ad19654243c9a65024
|
[
"MIT"
] | 2
|
2020-05-24T12:21:36.000Z
|
2022-02-08T03:02:17.000Z
|
poker/test/test_env.py
|
MorGriffiths/PokerAI
|
a68400f4918f10dde82574ad19654243c9a65024
|
[
"MIT"
] | 3
|
2017-04-28T00:25:18.000Z
|
2018-03-18T20:51:20.000Z
|
poker/test/test_env.py
|
C5ipo7i/PokerAI
|
a68400f4918f10dde82574ad19654243c9a65024
|
[
"MIT"
] | 2
|
2020-11-05T11:57:04.000Z
|
2021-03-17T17:57:24.000Z
|
import numpy as np
import torch
import unittest
import numpy as np
import os
import copy
from models.networks import OmahaActor,OmahaQCritic,OmahaObsQCritic,CombinedNet
from poker_env.env import Poker,Status
from poker_env.config import Config
import poker_env.datatypes as pdt
from utils.cardlib import winner,holdem_winner,encode
def _make_action(action, category, betsize):
    """Build a scripted actor-output dict with uniform 5-way action probs."""
    return {
        'action': action,
        'action_category': category,
        'action_probs': torch.zeros(5).fill_(0.2),
        'action_prob': np.array([0.2]),
        'betsize': betsize,
    }


# Canned actor outputs used to script deterministic hands in the tests.
ACTION_CHECK = _make_action(0, 0, 0)
ACTION_FOLD = _make_action(1, 1, 0)
ACTION_BET = _make_action(4, 3, 1)
ACTION_CALL = _make_action(2, 2, 0)
ACTION_RAISE = _make_action(4, 4, 1)
ACTION_MIN_RAISE = _make_action(4, 4, 0)

# Expected flattened sizes of the env's state and observation vectors.
STATE_SHAPE = 39
OBS_SHAPE = 49
def run_poker_env(env, case):
    """Play one hand to completion, feeding scripted decisions from ``case``.

    ``case`` is an indexable sequence of (action, action_category,
    action_prob, action_probs) tuples, one per decision point.  Returns the
    env after the hand is done so callers can inspect its final state.
    """
    state, obs, done, mask, betsize_mask = env.reset()
    turn = 0
    while not done:
        action, category, prob, probs = case[turn]
        decision = {
            'action': action,
            'action_category': category,
            'action_prob': prob,
            'action_probs': probs,
        }
        state, obs, done, mask, betsize_mask = env.step(decision)
        turn += 1
    return env
class TestEnv(unittest.TestCase):
@classmethod
def setUp(self):
game_object = pdt.Globals.GameTypeDict[pdt.GameTypes.OMAHAHI]
config = Config()
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
self.network_params = config.network_params
self.network_params['device'] = device
self.env_params = {
'game':pdt.GameTypes.OMAHAHI,
'betsizes': game_object.rule_params['betsizes'],
'bet_type': game_object.rule_params['bettype'],
'n_players': 2,
'pot':1,
'stacksize': 5.,
'cards_per_player': game_object.state_params['cards_per_player'],
'starting_street': game_object.starting_street,
'global_mapping':config.global_mapping,
'state_mapping':config.state_mapping,
'obs_mapping':config.obs_mapping,
'shuffle':False
}
def testInitialization(self):
env = Poker(self.env_params)
assert env.street == self.env_params['starting_street']
assert env.game == self.env_params['game']
assert env.n_players == self.env_params['n_players']
assert len(env.players) == self.env_params['n_players']
assert env.starting_stack == self.env_params['stacksize']
def testReset(self):
params = copy.deepcopy(self.env_params)
params['starting_street'] = pdt.Street.RIVER
env = Poker(params)
state,obs,done,mask,betsize_mask = env.reset()
assert state.ndim == 3
assert obs.ndim == 3
assert state.shape == (1,1,STATE_SHAPE)
assert obs.shape == (1,1,OBS_SHAPE)
assert state[0,0,env.state_mapping['street']] == pdt.Street.RIVER
assert state[0,-1,env.state_mapping['hero_position']] == pdt.Position.BB
assert state[0,-1,env.state_mapping['hero_stacksize']] == self.env_params['stacksize']
assert state[0,-1,env.state_mapping['player1_position']] == pdt.Position.BB
assert state[0,-1,env.state_mapping['player1_stacksize']] == self.env_params['stacksize']
assert state[0,-1,env.state_mapping['player1_street_total']] == 0
assert state[0,-1,env.state_mapping['player2_position']] == pdt.Position.SB
assert state[0,-1,env.state_mapping['player2_stacksize']] == self.env_params['stacksize']
assert state[0,-1,env.state_mapping['player2_street_total']] == 0
assert state[0,-1,env.state_mapping['last_action']] == pdt.Action.UNOPENED
assert state[0,-1,env.state_mapping['last_aggressive_action']] == pdt.Action.UNOPENED
assert state[0,-1,env.state_mapping['last_betsize']] == 0
assert state[0,-1,env.state_mapping['last_position']] == pdt.Position.BTN
assert state[0,-1,env.state_mapping['amount_to_call']] == 0
assert state[0,-1,env.state_mapping['pot_odds']] == 0
assert env.players_remaining == 2
assert done == False
assert np.array_equal(mask,np.array([1., 0., 0., 1., 0.]))
assert np.array_equal(betsize_mask,np.array([1.,1.]))
assert len(env.players.players['SB'].hand) == self.env_params['cards_per_player']
assert len(env.players.players['BB'].hand) == self.env_params['cards_per_player']
assert len(env.deck) == 52 - (self.env_params['cards_per_player'] * self.env_params['n_players'] + pdt.Globals.INITIALIZE_BOARD_CARDS[params['starting_street']])
def testCheckCheck(self):
params = copy.deepcopy(self.env_params)
params['starting_street'] = pdt.Street.RIVER
params['stacksize'] = 5
params['pot'] = 1
env = Poker(params)
state,obs,done,mask,betsize_mask = env.reset()
state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
assert state.ndim == 3
assert obs.ndim == 3
assert state.shape == (1,2,STATE_SHAPE)
assert obs.shape == (1,2,OBS_SHAPE)
assert state[:,1][:,env.state_mapping['street']] == params['starting_street']
assert state[:,1][:,env.state_mapping['hero_position']] == pdt.Position.SB
assert state[:,1][:,env.state_mapping['player2_position']] == pdt.Position.BB
assert state[:,1][:,env.state_mapping['last_position']] == pdt.Position.BB
assert state[:,1][:,env.state_mapping['last_action']] == pdt.Action.CHECK
assert state[:,1][:,env.state_mapping['hero_stacksize']] == params['stacksize']
assert state[:,1][:,env.state_mapping['player2_stacksize']] == params['stacksize']
assert state[:,1][:,env.state_mapping['amount_to_call']] == 0
assert state[:,1][:,env.state_mapping['pot_odds']] == 0
assert done == False
assert np.array_equal(mask,np.array([1., 0., 0., 1., 0.]))
assert np.array_equal(betsize_mask,np.array([1.,1.]))
state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
assert done == True
assert env.players['SB'].stack == 6
assert env.players['BB'].stack == 5
def testCheckBetFold(self):
params = copy.deepcopy(self.env_params)
params['starting_street'] = pdt.Street.RIVER
params['stacksize'] = 5
params['pot'] = 1
env = Poker(params)
state,obs,done,mask,betsize_mask = env.reset()
state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
state,obs,done,mask,betsize_mask = env.step(ACTION_BET)
assert state.ndim == 3
assert obs.ndim == 3
assert state.shape == (1,3,STATE_SHAPE)
assert obs.shape == (1,3,OBS_SHAPE)
assert env.players['SB'].stack == 4
assert env.players['BB'].stack == 5
assert env.players['SB'].street_total == 1
assert env.players['BB'].street_total == 0
assert env.pot == 2
assert state[:,-1][:,env.state_mapping['street']] == pdt.Street.RIVER
assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.BB
assert state[:,-1][:,env.state_mapping['last_position']] == pdt.Position.SB
assert state[:,-1][:,env.state_mapping['last_action']] == pdt.Action.BET
assert state[:,-1][:,env.state_mapping['last_betsize']] == 1
assert state[:,-1][:,env.state_mapping['hero_stacksize']] == params['stacksize']
assert state[:,-1][:,env.state_mapping['player2_stacksize']] == params['stacksize'] - 1
assert state[:,-1][:,env.state_mapping['amount_to_call']] == 1
self.assertAlmostEqual(state[:,-1][:,env.state_mapping['pot_odds']][0],0.333,places=2)
assert done == False
assert np.array_equal(mask,np.array([0., 1., 1., 0., 1.]))
assert np.array_equal(betsize_mask,np.array([1.,1.]))
state,obs,done,mask,betsize_mask = env.step(ACTION_FOLD)
assert done == True
assert env.players['SB'].stack == 6
assert env.players['BB'].stack == 5
assert env.players['BB'].status == Status.FOLDED
def testBetRaiseCall(self):
    """River heads-up: bet, pot-limit raise, call to showdown.

    BB bets 1 into a 1-chip pot, SB raises to 4 total (call 1 + raise 3),
    BB calls, and the hand settles at showdown with SB the winner.
    """
    params = copy.deepcopy(self.env_params)
    params['starting_street'] = pdt.Street.RIVER
    params['stacksize'] = 5
    params['pot'] = 1
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    state,obs,done,mask,betsize_mask = env.step(ACTION_BET)
    assert state.ndim == 3
    assert obs.ndim == 3
    # reset + one action -> two recorded time steps.
    assert state.shape == (1,2,STATE_SHAPE)
    assert obs.shape == (1,2,OBS_SHAPE)
    # BB acts first on the river and has bet 1.
    assert env.players['SB'].stack == 5
    assert env.players['BB'].stack == 4
    assert env.players['SB'].street_total == 0
    assert env.players['BB'].street_total == 1
    assert env.pot == 2
    assert state[:,-1][:,env.state_mapping['street']] == pdt.Street.RIVER
    # Latest state row: SB is hero, facing BB's bet.
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.SB
    assert state[:,-1][:,env.state_mapping['last_position']] == pdt.Position.BB
    assert state[:,-1][:,env.state_mapping['last_action']] == pdt.Action.BET
    assert state[:,-1][:,env.state_mapping['last_betsize']] == 1
    assert state[:,-1][:,env.state_mapping['hero_stacksize']] == 5
    assert state[:,-1][:,env.state_mapping['player2_stacksize']] == 5 - 1
    assert state[:,-1][:,env.state_mapping['amount_to_call']] == 1
    # Call 1 to win 3 -> ~0.333.
    self.assertAlmostEqual(state[:,-1][:,env.state_mapping['pot_odds']][0],0.333,places=2)
    assert done == False
    assert np.array_equal(mask,np.array([0., 1., 1., 0., 1.]))
    assert np.array_equal(betsize_mask,np.array([1.,1.]))
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    assert state.ndim == 3
    assert obs.ndim == 3
    assert state.shape == (1,3,STATE_SHAPE)
    assert obs.shape == (1,3,OBS_SHAPE)
    # SB's pot-limit raise: call 1 + raise 3 = 4 total committed.
    assert env.players['SB'].stack == 1
    assert env.players['BB'].stack == 4
    assert env.players['SB'].street_total == 4
    assert env.players['BB'].street_total == 1
    assert env.pot == 6
    assert state[:,-1][:,env.state_mapping['street']] == pdt.Street.RIVER
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.BB
    assert state[:,-1][:,env.state_mapping['last_position']] == pdt.Position.SB
    assert state[:,-1][:,env.state_mapping['last_action']] == pdt.Action.RAISE
    assert state[:,-1][:,env.state_mapping['last_betsize']] == 4
    assert state[:,-1][:,env.state_mapping['hero_stacksize']] == self.env_params['stacksize'] - 1
    assert state[:,-1][:,env.state_mapping['player2_stacksize']] == self.env_params['stacksize'] - 4
    assert state[:,-1][:,env.state_mapping['amount_to_call']] == 3
    self.assertAlmostEqual(state[:,-1][:,env.state_mapping['pot_odds']][0],0.33,places=2)
    assert done == False
    assert np.array_equal(mask,np.array([0., 1., 1., 0., 1.]))
    # SB has only 1 chip behind, so only one raise sizing remains available.
    assert np.array_equal(betsize_mask,np.array([1.,0.]))
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    assert done == True
    # SB shows down the winner: 1 behind + 9-chip pot = 10; BB keeps 1.
    assert env.players['SB'].stack == 10
    assert env.players['BB'].stack == 1
def testBetRestrictions(self):
    """Once a player is all-in, further raising must be disabled.

    After bet -> raise -> min-raise on the river, BB's min-raise commits
    the last of its 5-chip stack; the next actor may only fold or call
    (no bet sizings remain).
    """
    params = copy.deepcopy(self.env_params)
    params['starting_street'] = pdt.Street.RIVER
    params['stacksize'] = 5
    params['pot'] = 1
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    state,obs,done,mask,betsize_mask = env.step(ACTION_BET)
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    state,obs,done,mask,betsize_mask = env.step(ACTION_MIN_RAISE)
    assert env.players['SB'].stack == 1
    assert env.players['SB'].status == Status.ACTIVE
    # BB's min-raise was capped by its remaining stack -> all-in.
    assert env.players['BB'].stack == 0
    assert env.players['BB'].status == Status.ALLIN
    assert state[0,-1,env.state_mapping['blind']] == pdt.Blind.NO_BLIND
    # Facing the all-in: fold/call only, and no raise sizings available.
    assert np.array_equal(mask,np.array([0., 1., 1., 0., 0.]))
    assert np.array_equal(betsize_mask,np.array([0.,0.]))
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    assert done == True
    # SB calls its last chip and wins the 11-chip pot.
    assert env.players['SB'].stack == 11
    assert env.players['BB'].stack == 0
def testTies(self):
    """A split pot on the river returns half the pot to each player."""
    config = copy.deepcopy(self.env_params)
    config.update({
        'starting_street': pdt.Street.RIVER,
        'stacksize': 5,
        'pot': 1,
    })
    env = Poker(config)
    state,obs,done,mask,betsize_mask = env.reset()
    # Force both players onto an identical best hand via a fixed board.
    env.board = [14,0,13,1,12,2,2,2,2,3]
    env.players['SB'].hand = [[11,3],[10,3],[3,2],[3,3]]
    env.players['BB'].hand = [[11,2],[10,2],[4,0],[4,3]]
    for chosen_action in (ACTION_BET, ACTION_CALL):
        state,obs,done,mask,betsize_mask = env.step(chosen_action)
    assert done
    # 1-chip pot plus the matched bet/call splits evenly: 5.5 each.
    assert env.players['SB'].stack == 5.5
    assert env.players['BB'].stack == 5.5
def testBlindInitialization(self):
    """Preflop reset posts both blinds and leaves SB first to act."""
    config = copy.deepcopy(self.env_params)
    config.update({
        'starting_street': pdt.Street.PREFLOP,
        'pot': 0,
    })
    env = Poker(config)
    state,obs,done,mask,betsize_mask = env.reset()
    # Blinds are deducted on reset: SB posts 0.5, BB posts 1.
    assert env.players['SB'].stack == 4.5
    assert env.players['BB'].stack == 4.
    assert env.players['SB'].street_total == 0.5
    assert env.players['BB'].street_total == 1.
    assert state[0,-1,env.state_mapping['blind']] == pdt.Blind.POSTED
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.SB
    assert state[:,-1][:,env.state_mapping['last_position']] == pdt.Position.BB
    assert not done
def testStreetIncrement(self):
    """Streets advance when betting closes, dealing board cards as they come.

    Part 1: start on the turn; a bet/call advances to the river (new board
    card dealt) and a second bet/call ends the hand.
    Part 2: start preflop; limp/raise/call reaches the flop, then the hand
    checks through turn and river to showdown.
    """
    params = copy.deepcopy(self.env_params)
    params['starting_street'] = pdt.Street.TURN
    params['pot'] = 1
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    # Final board slots stay zero until the river card is dealt.
    assert env.board[-2] == 0
    assert env.board[-1] == 0
    state,obs,done,mask,betsize_mask = env.step(ACTION_BET)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    assert env.street == pdt.Street.RIVER
    assert env.board[-2] != 0
    state,obs,done,mask,betsize_mask = env.step(ACTION_BET)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    assert done == True
    del env
    # --- Part 2: a full hand from preflop ---
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    # SB limped; BB is next to act.
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.BB
    assert state[:,-1][:,env.state_mapping['last_position']] == pdt.Position.SB
    assert env.pot == 2
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    assert env.players['BB'].stack == 2
    assert env.players['SB'].stack == 4
    assert env.pot == 4
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.SB
    assert state[:,-1][:,env.state_mapping['last_position']] == pdt.Position.BB
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.BB
    assert state[:,-1][:,env.state_mapping['player2_position']] == pdt.Position.SB
    # NOTE(review): last_position reads BTN at the start of a new street —
    # presumably a "no action yet this street" sentinel; confirm in the env.
    assert state[:,-1][:,env.state_mapping['last_position']] == pdt.Position.BTN
    assert state[:,-1][:,env.state_mapping['last_aggressive_position']] == pdt.Position.BB
    assert env.street == pdt.Street.FLOP
    # Check down the remaining streets to showdown.
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    assert env.street == pdt.Street.TURN
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    assert env.street == pdt.Street.RIVER
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    assert done == True
def testThreePlayers(self):
    """Three-handed play: positions, folds, street transitions and payouts.

    Scenario 1: BTN raises, SB folds, BB calls, then the hand checks down.
    Scenario 2: everyone limps/checks to showdown.
    Scenario 3: BTN raises and both blinds fold preflop.
    """
    params = copy.deepcopy(self.env_params)
    params['n_players'] = 3
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    # Preflop three-handed: BTN acts first.
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.BTN
    assert state[:,-1][:,env.state_mapping['player1_position']] == pdt.Position.BTN
    assert state[:,-1][:,env.state_mapping['player2_position']] == pdt.Position.SB
    assert state[:,-1][:,env.state_mapping['player3_position']] == pdt.Position.BB
    assert env.street == pdt.Street.PREFLOP
    assert env.players.num_active_players == 3
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    # BTN's raise commits 3.5; blinds are unchanged.
    assert env.players['SB'].stack == 4.5
    assert env.players['BB'].stack == 4.
    assert env.players['BTN'].stack == 1.5
    assert env.players['SB'].street_total == 0.5
    assert env.players['BB'].street_total == 1.
    assert env.players['BTN'].street_total == 3.5
    state,obs,done,mask,betsize_mask = env.step(ACTION_FOLD)
    assert env.players['SB'].status == Status.FOLDED
    assert env.players['BB'].status == Status.ACTIVE
    assert env.players['BTN'].status == Status.ACTIVE
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    # Street totals reset when the betting round closes.
    assert env.players['SB'].stack == 4.5
    assert env.players['BB'].stack == 1.5
    assert env.players['BTN'].stack == 1.5
    assert env.players['SB'].street_total == 0.
    assert env.players['BB'].street_total == 0.
    assert env.players['BTN'].street_total == 0.
    assert state[:,-1][:,env.state_mapping['pot']] == 7.5
    assert env.pot == 7.5
    assert env.street == pdt.Street.FLOP
    assert env.players.num_active_players == 2
    # Postflop, BB (earliest remaining position) acts first.
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.BB
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    assert env.street == pdt.Street.TURN
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    assert env.street == pdt.Street.RIVER
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    assert done == True
    # BB wins the 7.5-chip pot at showdown.
    assert env.players['SB'].stack == 4.5
    assert env.players['BB'].stack == 9
    assert env.players['BTN'].stack == 1.5
    del env
    # --- Scenario 2: limped pot, checked to showdown ---
    params['n_players'] = 3
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.BTN
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.SB
    assert env.players['SB'].street_total == 0.5
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    # SB completes to the full big blind.
    assert env.players['SB'].street_total == 1
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.BB
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    assert env.street == pdt.Street.FLOP
    assert env.pot == 3
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    assert env.street == pdt.Street.TURN
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    assert env.street == pdt.Street.RIVER
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CHECK)
    assert done == True
    # SB wins the 3-chip limped pot at showdown.
    assert env.players['SB'].stack == 7
    assert env.players['BB'].stack == 4
    assert env.players['BTN'].stack == 4.
    del env
    # --- Scenario 3: raise, both blinds fold ---
    params['n_players'] = 3
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    state,obs,done,mask,betsize_mask = env.step(ACTION_FOLD)
    state,obs,done,mask,betsize_mask = env.step(ACTION_FOLD)
    assert done == True
    # BTN collects the blinds: 1.5 profit on top of its 5-chip stack.
    assert env.players['SB'].stack == 4.5
    assert env.players['BB'].stack == 4
    assert env.players['BTN'].stack == 6.5
def testBetLimits(self):
    """Bet sizing under LIMIT and POT_LIMIT rules, three-handed preflop.

    LIMIT: raises come in fixed increments (aggressive betsize 2 / 2.5).
    POT_LIMIT: each raise may be at most the pot after calling, producing
    the 3.5 / 11.5 / 35.5 commitment ladder checked below.
    """
    params = copy.deepcopy(self.env_params)
    # Limit
    params['bet_type'] = pdt.LimitTypes.LIMIT
    params['n_players'] = 3
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    # Blinds only: pot = 1.5, BTN to act.
    assert state[:,-1][:,env.state_mapping['pot']] == 1.5
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.BTN
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    assert state[:,-1][:,env.state_mapping['pot']] == 3.5
    assert env.players['BTN'].stack == 3
    assert env.players['BB'].stack == 4
    assert env.players['SB'].stack == 4.5
    assert env.players['SB'].street_total == 0.5
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.SB
    # Fixed-limit raise size.
    assert state[:,-1][:,env.state_mapping['last_aggressive_betsize']] == 2
    assert env.street == pdt.Street.PREFLOP
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    assert env.players['BTN'].stack == 3
    assert env.players['BB'].stack == 4
    assert env.players['SB'].stack == 2
    assert env.players['SB'].street_total == 3.
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.BB
    assert state[:,-1][:,env.state_mapping['last_aggressive_betsize']] == 2.5
    assert state[:,-1][:,env.state_mapping['pot']] == 6
    assert env.street == pdt.Street.PREFLOP
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    assert state[:,-1][:,env.state_mapping['pot']] == 8
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.BTN
    assert env.street == pdt.Street.PREFLOP
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    # All three have matched 3 chips; betting closes and the flop comes.
    assert state[:,-1][:,env.state_mapping['pot']] == 9
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.SB
    assert env.street == pdt.Street.FLOP
    del env
    # --- Pot limit, deep stacks ---
    params['bet_type'] = pdt.LimitTypes.POT_LIMIT
    params['n_players'] = 3
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    params['stacksize'] = 100
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.BTN
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    assert state[:,-1][:,env.state_mapping['hero_position']] == pdt.Position.SB
    # BTN pot-raises: call 1 + raise 2.5 -> 3.5 committed.
    assert env.players['BTN'].stack == 96.5
    assert env.players['BTN'].street_total == 3.5
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    # SB re-pots to 11.5 total.
    assert env.players['SB'].stack == 88.5
    assert env.players['SB'].street_total == 11.5
    assert state[:,-1][:,env.state_mapping['last_aggressive_betsize']] == 11
    assert state[:,-1][:,env.state_mapping['pot']] == 16
    state,obs,done,mask,betsize_mask = env.step(ACTION_FOLD)
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    # BTN re-pots again to 35.5 total.
    assert env.players['BTN'].stack == 64.5
    assert env.players['BTN'].street_total == 35.5
    assert state[:,-1][:,env.state_mapping['last_aggressive_betsize']] == 32
    assert state[:,-1][:,env.state_mapping['pot']] == 48
    del env
    # --- Pot limit: limped pot, postflop pot-sized bet and raise ---
    params['bet_type'] = pdt.LimitTypes.POT_LIMIT
    params['n_players'] = 3
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    params['stacksize'] = 100
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    assert env.players['SB'].stack == 96
    assert env.players['SB'].street_total == 4
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    assert env.street == pdt.Street.FLOP
    assert state[:,-1][:,env.state_mapping['pot']] == 12
    state,obs,done,mask,betsize_mask = env.step(ACTION_BET)
    # Pot-sized bet of 12 into 12.
    assert state[:,-1][:,env.state_mapping['pot']] == 24
    assert env.players['SB'].stack == 84
    assert env.players['SB'].street_total == 12
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    # Pot-sized raise: call 12 + raise 36 -> 48 committed.
    assert env.players['BB'].stack == 48
    assert env.players['BB'].street_total == 48
    assert state[:,-1][:,env.state_mapping['pot']] == 72
    #TODO No Limit
    # params['bet_limit'] = pdt.LimitTypes.NO_LIMIT
    # params['n_players'] = 3
    # params['starting_street'] = pdt.Street.PREFLOP
    # params['pot'] = 0
    # env = Poker(params)
    # state,obs,done,mask,betsize_mask = env.reset()
    # del env
    # Pot Limit
    # Test reraise, preflop raise, sb raise preflop. raise vs bet
def testAllin(self):
    """An all-in and call skips remaining betting and runs out the board.

    Three-handed: BTN raises, SB folds, BB re-raises all-in, BTN calls.
    With no chips behind, the street jumps straight to the river and the
    hand ends (BB wins the pot here).
    """
    params = copy.deepcopy(self.env_params)
    params['n_players'] = 3
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    assert env.players['BTN'].stack == 1.5
    assert env.players['BTN'].street_total == 3.5
    state,obs,done,mask,betsize_mask = env.step(ACTION_FOLD)
    assert env.players['SB'].status == Status.FOLDED
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    # BB's raise is capped by its 5-chip stack -> all-in.
    assert env.players['BB'].stack == 0
    assert env.players['BB'].street_total == 5
    assert env.players['BB'].status == Status.ALLIN
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    # BB scoops the 10.5-chip pot; SB only loses the posted blind.
    assert env.players['BB'].stack == 10.5
    assert env.players['SB'].stack == 4.5
    assert env.players['BTN'].stack == 0
    assert env.players['BTN'].street_total == 0
    # All-in call fast-forwards the street to the river before settling.
    assert env.street == pdt.Street.RIVER
    assert done == True
def testActor(self):
    """Smoke-test OmahaActor: forward passes on the reset state and on a
    post-action state both produce torch tensors."""
    env = Poker(copy.deepcopy(self.env_params))
    actor = OmahaActor(
        152,                    # seed
        env.state_space,
        env.action_space,
        env.betsize_space,
        self.network_params,
    )
    state,obs,done,mask,betsize_mask = env.reset()
    output = actor(state,mask,betsize_mask)
    state,obs,done,mask,betsize_mask = env.step(ACTION_BET)
    output = actor(state,mask,betsize_mask)
    # Both probability outputs must be torch tensors.
    assert isinstance(output['action_probs'],torch.Tensor)
    assert isinstance(output['action_prob'],torch.Tensor)
def testCritic(self):
    """Smoke-test OmahaObsQCritic: a forward pass on the reset observation
    yields a torch tensor under the 'value' key."""
    env = Poker(copy.deepcopy(self.env_params))
    critic = OmahaObsQCritic(
        152,                    # seed
        env.state_space,
        env.action_space,
        env.betsize_space,
        self.network_params,
    )
    state,obs,done,mask,betsize_mask = env.reset()
    output = critic(obs)
    assert isinstance(output['value'],torch.Tensor)
# def testCombined(self):
# params = copy.deepcopy(self.env_params)
# env = Poker(params)
# nA = env.action_space
# nB = env.betsize_space
# nS = env.state_space
# seed = 152
# device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# params['device'] = device
# params['maxlen'] = 10
# params['embedding_size'] = 128
# params['transformer_in'] = 7718
# params['transformer_out'] = 128
# net = CombinedNet(seed,nS,nA,nB,params)
# state,obs,done,mask,betsize_mask = env.reset()
# output = net(state,mask,betsize_mask)
# assert isinstance(output['value'],torch.Tensor)
def testMasks(self):
    """Action and betsize masks shrink as stacks are committed preflop.

    SB raises, BB re-raises; once the short stack can no longer make a
    full raise, first one and then both betsize options are masked out.
    """
    params = copy.deepcopy(self.env_params)
    params['stacksize'] = 5
    params['n_players'] = 2
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    assert state[:,-1][:,env.state_mapping['pot']] == 1.5
    assert state[:,-1][:,env.state_mapping['player1_stacksize']] == 4.5
    assert state[:,-1][:,env.state_mapping['player1_position']] == pdt.Position.SB
    assert state[:,-1][:,env.state_mapping['player2_stacksize']] == 4
    assert state[:,-1][:,env.state_mapping['player2_position']] == pdt.Position.BB
    assert state[:,-1][:,env.state_mapping['street']] == pdt.Street.PREFLOP
    assert env.current_player == 'SB'
    # SB facing the blind: fold/call/raise legal, both sizings available.
    assert np.array_equal(betsize_mask,np.array([1,1]))
    assert np.array_equal(mask,np.array([0,1,1,0,1]))
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    assert env.current_player == 'BB'
    assert state[:,-1][:,env.state_mapping['pot']] == 4
    assert state[:,-1][:,env.state_mapping['player1_stacksize']] == 4
    assert state[:,-1][:,env.state_mapping['player1_position']] == pdt.Position.BB
    assert state[:,-1][:,env.state_mapping['player2_stacksize']] == 2
    assert state[:,-1][:,env.state_mapping['player2_position']] == pdt.Position.SB
    assert state[:,-1][:,env.state_mapping['street']] == pdt.Street.PREFLOP
    assert np.array_equal(mask,np.array([0,1,1,0,1]))
    # BB can still raise, but only the smaller sizing fits the stacks.
    assert np.array_equal(betsize_mask,np.array([1,0]))
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    assert state[:,-1][:,env.state_mapping['pot']] == 8
    assert state[:,-1][:,env.state_mapping['player1_stacksize']] == 2
    assert state[:,-1][:,env.state_mapping['player1_position']] == pdt.Position.SB
    # BB is now all-in; SB may only fold or call.
    assert state[:,-1][:,env.state_mapping['player2_stacksize']] == 0
    assert state[:,-1][:,env.state_mapping['player2_position']] == pdt.Position.BB
    assert state[:,-1][:,env.state_mapping['street']] == pdt.Street.PREFLOP
    assert np.array_equal(mask,np.array([0,1,1,0,0]))
    assert np.array_equal(betsize_mask,np.array([0,0]))
def testEnvCategoryMapping(self):
    """convert_to_category maps a (network action, betsize) pair onto the
    flat action-category space at each decision point.

    Observed category layout: 0 = check, 1 = fold, 2 = call, and the raise
    /bet sizings occupy 3 (smaller) and 4 (larger/pot), judged against the
    live betting state.

    Fix: removed a leftover debug ``print('check', ...)`` statement that
    polluted test output; it had no effect on the assertions.
    """
    params = copy.deepcopy(self.env_params)
    params['stacksize'] = 50
    params['n_players'] = 2
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    # SB facing the blind: raise totals 3 / 2 map to the two sizing slots.
    assert env.convert_to_category(pdt.NetworkActions.RAISE,3)[0] == 4
    assert env.convert_to_category(pdt.NetworkActions.RAISE,2)[0] == 3
    assert env.convert_to_category(pdt.NetworkActions.CALL,0.5)[0] == 2
    assert env.convert_to_category(pdt.NetworkActions.CHECK,0)[0] == 0
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    assert env.convert_to_category(pdt.NetworkActions.RAISE,9)[0] == 4
    assert env.convert_to_category(pdt.NetworkActions.RAISE,5)[0] == 3
    assert env.convert_to_category(pdt.NetworkActions.CALL,2)[0] == 2
    assert env.convert_to_category(pdt.NetworkActions.CHECK,0)[0] == 0
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    # Postflop, unopened pot: bets instead of raises.
    assert env.convert_to_category(pdt.NetworkActions.BET,6)[0] == 4
    assert env.convert_to_category(pdt.NetworkActions.BET,3)[0] == 3
    assert env.convert_to_category(pdt.NetworkActions.FOLD,0)[0] == 1
    state,obs,done,mask,betsize_mask = env.step(ACTION_BET)
    assert env.convert_to_category(pdt.NetworkActions.RAISE,24)[0] == 4
    assert env.convert_to_category(pdt.NetworkActions.RAISE,12)[0] == 3
    assert env.convert_to_category(pdt.NetworkActions.CALL,6)[0] == 2
    assert env.convert_to_category(pdt.NetworkActions.FOLD,0)[0] == 1
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    assert env.convert_to_category(pdt.NetworkActions.RAISE,47)[0] == 4
    assert env.convert_to_category(pdt.NetworkActions.RAISE,42)[0] == 3
    assert env.convert_to_category(pdt.NetworkActions.CALL,18)[0] == 2
    assert env.convert_to_category(pdt.NetworkActions.FOLD,0)[0] == 1
    del env
    # Short-stack edge case: sizes capped by a 3-chip stack.
    params['stacksize'] = 3
    params['n_players'] = 2
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    assert env.convert_to_category(pdt.NetworkActions.RAISE,3)[0] == 4
    assert env.convert_to_category(pdt.NetworkActions.RAISE,2)[0] == 3
    assert env.convert_to_category(pdt.NetworkActions.CALL,0)[0] == 2
    assert env.convert_to_category(pdt.NetworkActions.FOLD,0)[0] == 1
def testStreetInitialization(self):
    """Starting the hand on the river heads-up, BB is first to act."""
    config = copy.deepcopy(self.env_params)
    for key, value in (('stacksize', 50),
                       ('n_players', 2),
                       ('starting_street', pdt.Street.RIVER),
                       ('pot', 1)):
        config[key] = value
    env = Poker(config)
    state,obs,done,mask,betsize_mask = env.reset()
    latest = state[:,-1]
    assert latest[:,env.state_mapping['player1_position']] == pdt.Position.BB
    assert latest[:,env.state_mapping['hero_position']] == pdt.Position.BB
def additionalTests(self):
    """Short-stack hand that ends with both players all-in on the flop.

    Preflop raise/call, then flop bet/call commits both 5-chip stacks;
    the street jumps straight to the river and the hand completes.
    """
    params = copy.deepcopy(self.env_params)
    params['stacksize'] = 5
    params['n_players'] = 2
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    state,obs,done,mask,betsize_mask = env.step(ACTION_RAISE)
    assert state[:,-1][:,env.state_mapping['player2_stacksize']] == 2
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    assert state[:,-1][:,env.state_mapping['player1_stacksize']] == 2
    # Raw street encoding: 1 corresponds to the flop here.
    assert state[:,-1][:,env.state_mapping['street']] == 1
    state,obs,done,mask,betsize_mask = env.step(ACTION_BET)
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    # Both stacks are empty; street has advanced to 3 (river) to settle.
    assert state[:,-1][:,env.state_mapping['player1_stacksize']] == 0
    assert state[:,-1][:,env.state_mapping['player2_stacksize']] == 0
    assert state[:,-1][:,env.state_mapping['street']] == 3
    assert done == True
def preflopTests(self):
    """Preflop edge cases: BB's options after an SB limp, and SB min-raise.

    After SB limps, BB may only check or raise (no fold/call); separately,
    an SB min-raise leaves the expected stacks for both players.
    """
    params = copy.deepcopy(self.env_params)
    params['stacksize'] = 5
    params['n_players'] = 2
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    assert np.array_equal(mask,np.array([0,1,1,0,1]))
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    assert state[:,-1][:,env.state_mapping['player2_stacksize']] == 4
    assert state[:,-1][:,env.state_mapping['player1_stacksize']] == 4
    # BB facing a limp: check or raise only.
    assert np.array_equal(mask,np.array([1,0,0,0,1]))
    del env
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    state,obs,done,mask,betsize_mask = env.step(ACTION_MIN_RAISE)
    # SB min-raised to 2 total (stack 5 -> 3); BB still has its blind posted.
    assert state[:,-1][:,env.state_mapping['player2_stacksize']] == 3
    assert state[:,-1][:,env.state_mapping['player1_stacksize']] == 4
def betsizingTests(self):
    """return_potlimit_betsize converts a sizing category into chips.

    Returned amounts are the incremental chips the actor must commit
    (not totals): e.g. SB min-raising over the blind costs 1.5 more.
    Category 0 appears to be the min size and 1 the pot size.
    """
    params = copy.deepcopy(self.env_params)
    params['stacksize'] = 5
    params['n_players'] = 2
    params['starting_street'] = pdt.Street.PREFLOP
    params['pot'] = 0
    env = Poker(params)
    state,obs,done,mask,betsize_mask = env.reset()
    # SB facing the 1-chip blind: min-raise costs 1.5, pot-raise 2.5.
    betsize = env.return_potlimit_betsize(action=4,betsize_category=0)
    assert betsize == 1.5
    betsize = env.return_potlimit_betsize(action=4,betsize_category=1)
    assert betsize == 2.5
    # Calling the blind costs the remaining 0.5.
    betsize = env.return_potlimit_betsize(action=2,betsize_category=0)
    assert betsize == 0.5
    state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
    # BB after the limp: min-raise adds 1, pot-raise adds 2.
    betsize = env.return_potlimit_betsize(action=4,betsize_category=0)
    assert betsize == 1
    betsize = env.return_potlimit_betsize(action=4,betsize_category=1)
    assert betsize == 2
# def testOutcome(self):
# params = self.env_params
# params['stacksize'] = 5
# params['starting_street'] = pdt.Street.TURN
# params['pot'] = 1
# env = Poker(self.env_params)
# state,obs,done,mask,betsize_mask = env.reset()
# env.players['SB'].hand = [[7, 1], [5, 3], [14, 2], [10, 2]]
# env.players['BB'].hand = [[14, 3], [2, 1], [2, 4], [11, 1]]
# env.board = [[10, 3], [2, 2], [4, 3], [13, 3], [4, 2]]
# state,obs,done,mask,betsize_mask = env.step(ACTION_BET)
# state,obs,done,mask,betsize_mask = env.step(ACTION_CALL)
# assert state[:,-1][:,env.state_mapping['player2_stacksize']] == 7
# assert state[:,-1][:,env.state_mapping['player1_stacksize']] == 4
# assert done == True
def envTestSuite():
    """Build the TestEnv suite in a deliberate execution order.

    Fix: the suite previously registered 'testPreflop', 'testBetsizing'
    and 'testOutcome' — none of which exist on TestEnv (the first two are
    defined as preflopTests/betsizingTests, and testOutcome is commented
    out), so running the suite raised at collection time. The names now
    match the actual method definitions.
    """
    suite = unittest.TestSuite()
    suite.addTest(TestEnv('testInitialization'))
    suite.addTest(TestEnv('testReset'))
    suite.addTest(TestEnv('testCheckCheck'))
    suite.addTest(TestEnv('testCheckBetFold'))
    suite.addTest(TestEnv('testBetRaiseCall'))
    suite.addTest(TestEnv('testBetRestrictions'))
    suite.addTest(TestEnv('testTies'))
    suite.addTest(TestEnv('testBlindInitialization'))
    suite.addTest(TestEnv('testStreetIncrement'))
    suite.addTest(TestEnv('testThreePlayers'))
    suite.addTest(TestEnv('testBetLimits'))
    suite.addTest(TestEnv('testAllin'))
    suite.addTest(TestEnv('testActor'))
    suite.addTest(TestEnv('testCritic'))
    # suite.addTest(TestEnv('testCombined'))
    suite.addTest(TestEnv('testMasks'))
    suite.addTest(TestEnv('testEnvCategoryMapping'))
    suite.addTest(TestEnv('testStreetInitialization'))
    suite.addTest(TestEnv('additionalTests'))
    suite.addTest(TestEnv('preflopTests'))
    suite.addTest(TestEnv('betsizingTests'))
    # suite.addTest(TestEnv('testOutcome'))  # method is currently commented out
    return suite
if __name__ == "__main__":
    # Run the curated environment suite with per-test progress output.
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(envTestSuite())
| 48.531136
| 170
| 0.628626
| 5,322
| 39,747
| 4.547914
| 0.044344
| 0.055032
| 0.077467
| 0.08197
| 0.86465
| 0.851223
| 0.833044
| 0.799992
| 0.778797
| 0.751157
| 0
| 0.025783
| 0.211538
| 39,747
| 819
| 171
| 48.531136
| 0.746546
| 0.042972
| 0
| 0.633288
| 0
| 0
| 0.092264
| 0.005449
| 0
| 0
| 0
| 0.001221
| 0.461434
| 1
| 0.031123
| false
| 0
| 0.014885
| 0
| 0.050068
| 0.001353
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a902d23485f45a5cc3457e98b23a0779592379cc
| 292
|
py
|
Python
|
crawler/crawler/treewalk/scheduler/__init__.py
|
amosproj/amos-ss2020-metadata-hub
|
f8434b27b306332c117a8dd20a8a55a3104d0f89
|
[
"MIT"
] | 9
|
2020-04-23T14:22:48.000Z
|
2022-02-25T21:35:05.000Z
|
crawler/crawler/treewalk/scheduler/__init__.py
|
amosproj/amos-ss2020-metadata-hub
|
f8434b27b306332c117a8dd20a8a55a3104d0f89
|
[
"MIT"
] | 42
|
2020-04-24T17:59:33.000Z
|
2022-02-16T01:09:23.000Z
|
crawler/crawler/treewalk/scheduler/__init__.py
|
amosproj/amos-ss2020-metadata-hub
|
f8434b27b306332c117a8dd20a8a55a3104d0f89
|
[
"MIT"
] | 2
|
2020-08-17T11:19:44.000Z
|
2021-04-30T08:32:05.000Z
|
from .interface import shutdown
from .interface import add_config
from .interface import get_schedule
from .interface import remove_config
from .interface import add_interval
from .interface import remove_interval
from .interface import get_intervals
from .scheduler import TreeWalkScheduler
| 32.444444
| 40
| 0.863014
| 38
| 292
| 6.473684
| 0.342105
| 0.369919
| 0.54065
| 0.178862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109589
| 292
| 8
| 41
| 36.5
| 0.946154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a9049c8dba8498d7bb80ddcf6371defb66cee1d1
| 10,108
|
py
|
Python
|
tests/snapshots/snap_test_schema_to_typed_dict.py
|
jwalterclark/avro-to-python-types
|
14ffbd24e228ccefe578a5a36435d18d9346b634
|
[
"MIT"
] | 7
|
2020-12-12T20:19:28.000Z
|
2020-12-14T21:28:28.000Z
|
tests/snapshots/snap_test_schema_to_typed_dict.py
|
dangreenisrael/avro-to-python-types
|
d0e0657367348c7ffcd1af2fcc06fad27345cd33
|
[
"MIT"
] | 5
|
2021-02-10T21:58:55.000Z
|
2021-11-01T16:02:01.000Z
|
tests/snapshots/snap_test_schema_to_typed_dict.py
|
dangreenisrael/avro-to-python-types
|
d0e0657367348c7ffcd1af2fcc06fad27345cd33
|
[
"MIT"
] | 2
|
2021-02-18T22:15:39.000Z
|
2021-09-19T00:05:25.000Z
|
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot
snapshots = Snapshot()
snapshots['SnapshotTypedDictArrayFromSchemaFile::test_array_map_schemas com.wave.Order.avsc'] = '''from enum import Enum
from typing import List, Optional, TypedDict
class ComWaveProduct_status(Enum):
AVAILABLE = "AVAILABLE"
OUT_OF_STOCK = "OUT_OF_STOCK"
ONLY_FEW_LEFT = "ONLY_FEW_LEFT"
class ComWaveProduct(TypedDict, total=False):
product_id: int
product_name: str
product_description: Optional[str]
product_status: ComWaveProduct_status
product_category: List[str]
price: float
product_hash: str
class ComWaveOrderDetail(TypedDict, total=False):
quantity: int
total: float
product_detail: ComWaveProduct
class ComWaveOrder(TypedDict, total=False):
order_id: int
customer_id: int
total: float
order_details: List[ComWaveOrderDetail]
'''
snapshots['SnapshotTypedDictArrayFromSchemaFile::test_array_map_schemas com.wave.OrderDetail.avsc'] = '''from enum import Enum
from typing import List, Optional, TypedDict
class ComWaveProduct_status(Enum):
AVAILABLE = "AVAILABLE"
OUT_OF_STOCK = "OUT_OF_STOCK"
ONLY_FEW_LEFT = "ONLY_FEW_LEFT"
class ComWaveProduct(TypedDict, total=False):
product_id: int
product_name: str
product_description: Optional[str]
product_status: ComWaveProduct_status
product_category: List[str]
price: float
product_hash: str
class ComWaveOrderDetail(TypedDict, total=False):
quantity: int
total: float
product_detail: ComWaveProduct
'''
snapshots['SnapshotTypedDictArrayFromSchemaFile::test_array_map_schemas com.wave.Product.avsc'] = '''from enum import Enum
from typing import List, Optional, TypedDict
class ComWaveProduct_status(Enum):
AVAILABLE = "AVAILABLE"
OUT_OF_STOCK = "OUT_OF_STOCK"
ONLY_FEW_LEFT = "ONLY_FEW_LEFT"
class ComWaveProduct(TypedDict, total=False):
product_id: int
product_name: str
product_description: Optional[str]
product_status: ComWaveProduct_status
product_category: List[str]
price: float
product_hash: str
'''
snapshots['SnapshotTypedDictFromSchemaFile::test_snapshot_expandable_schemas common.ChildA.avsc'] = '''from typing import Optional, TypedDict
class CommonChildA(TypedDict, total=False):
name: str
favorite_number: Optional[int]
favorite_color: Optional[str]
'''
snapshots['SnapshotTypedDictFromSchemaFile::test_snapshot_expandable_schemas common.ChildB.avsc'] = '''from datetime import date, datetime, time
from decimal import Decimal
from typing import TypedDict
from uuid import UUID
class CommonChildB(TypedDict, total=False):
streetaddress: str
city: str
birthdate: date
appt_date: date
time_of_day_birth: time
timestamp_of_birth: datetime
uuid_of_birth_record: UUID
weight: Decimal
'''
snapshots['SnapshotTypedDictFromSchemaFile::test_snapshot_expandable_schemas common.ChildC.avsc'] = '''from datetime import date, datetime, time
from decimal import Decimal
from enum import Enum
from typing import Optional, TypedDict
from uuid import UUID
class CommonSchool(Enum):
StBonifice = "StBonifice"
HogWarts = "HogWarts"
HardKnocks = "HardKnocks"
UnseenUniversity = "UnseenUniversity"
class CommonEyeColor(Enum):
green = "green"
brown = "brown"
blue = "blue"
class CommonChildC(TypedDict, total=False):
streetaddress: Optional[str]
city: Optional[str]
birthdate: date
appt_date: date
time_of_day_birth: Optional[time]
timestamp_of_birth: datetime
uuid_of_birth_record: UUID
weight: Decimal
timestamp_of_first_checkup: Optional[datetime]
school: CommonSchool
eye_color: Optional[CommonEyeColor]
'''
snapshots['SnapshotTypedDictFromSchemaFile::test_snapshot_expandable_schemas domain.Parent.avsc'] = '''from datetime import date, datetime, time
from decimal import Decimal
from typing import Optional, TypedDict
from uuid import UUID
class CommonChildA(TypedDict, total=False):
name: str
favorite_number: Optional[int]
favorite_color: Optional[str]
class CommonChildB(TypedDict, total=False):
streetaddress: str
city: str
birthdate: date
appt_date: date
time_of_day_birth: time
timestamp_of_birth: datetime
uuid_of_birth_record: UUID
weight: Decimal
class DomainCompositeItem(TypedDict, total=False):
composite_a: CommonChildA
composite_b: CommonChildB
class DomainParent(TypedDict, total=False):
first_item: CommonChildA
second_item: CommonChildA
composite_item: DomainCompositeItem
favorite_color: Optional[str]
'''
snapshots['SnapshotTypedDictFromSchemaFile::test_snapshot_schema_references com.wave.Order.avsc'] = '''from enum import Enum
from typing import List, Optional, TypedDict
class ComWaveProduct_status(Enum):
AVAILABLE = "AVAILABLE"
OUT_OF_STOCK = "OUT_OF_STOCK"
ONLY_FEW_LEFT = "ONLY_FEW_LEFT"
class ComWaveProduct(TypedDict, total=False):
product_id: int
product_name: str
product_description: Optional[str]
product_status: ComWaveProduct_status
product_category: List[str]
price: float
product_hash: str
class ComWaveOrderDetail(TypedDict, total=False):
quantity: int
total: float
product_detail: ComWaveProduct
class ComWaveOrder(TypedDict, total=False):
order_id: int
customer_id: int
total: float
order_details: List[ComWaveOrderDetail]
'''
snapshots['SnapshotTypedDictFromSchemaFile::test_snapshot_self_contained_schemas nested_record.avsc'] = '''from typing import Optional, TypedDict
class ExampleAvroAddressUSRecord(TypedDict, total=False):
streetaddress: str
city: str
class ExampleAvroUser(TypedDict, total=False):
name: str
favorite_number: Optional[int]
favorite_color: Optional[str]
address: ExampleAvroAddressUSRecord
'''
snapshots['SnapshotTypedDictFromSchemaFile::test_snapshot_self_contained_schemas nested_records.avsc'] = '''from typing import Optional, TypedDict
class ExampleAddressUSRecord(TypedDict, total=False):
streetaddress: str
city: str
class ExampleOtherThing(TypedDict, total=False):
thing1: str
thing2: Optional[int]
class ExampleUser(TypedDict, total=False):
name: str
favorite_number: Optional[int]
favorite_color: Optional[str]
address: ExampleAddressUSRecord
other_thing: ExampleOtherThing
'''
snapshots['SnapshotTypedDictFromSchemaFile::test_snapshot_self_contained_schemas nested_records_deep.avsc'] = '''from typing import Optional, TypedDict
class ExampleAvroAddressUSRecord(TypedDict, total=False):
streetaddress: str
city: str
class ExampleAvroNextOtherThing(TypedDict, total=False):
thing1: str
thing2: Optional[int]
class ExampleAvroOtherThing(TypedDict, total=False):
thing1: str
other_thing: ExampleAvroNextOtherThing
class ExampleAvroUser(TypedDict, total=False):
name: str
favorite_number: Optional[int]
favorite_color: Optional[str]
address: ExampleAvroAddressUSRecord
other_thing: ExampleAvroOtherThing
'''
snapshots['SnapshotTypedDictFromSchemaFile::test_snapshot_self_contained_schemas no_optional_field_record.avsc'] = '''from typing import TypedDict
class ExampleAvroAnotherExample(TypedDict, total=False):
id: str
'''
snapshots['SnapshotTypedDictFromSchemaString::test_snapshot_all_schemas nested_record.avsc'] = '''from typing import Optional, TypedDict
class ExampleAvroAddressUSRecord(TypedDict, total=False):
streetaddress: str
city: str
class ExampleAvroUser(TypedDict, total=False):
name: str
favorite_number: Optional[int]
favorite_color: Optional[str]
address: ExampleAvroAddressUSRecord
'''
snapshots['SnapshotTypedDictFromSchemaString::test_snapshot_all_schemas nested_records.avsc'] = '''from typing import Optional, TypedDict
class ExampleAddressUSRecord(TypedDict, total=False):
streetaddress: str
city: str
class ExampleOtherThing(TypedDict, total=False):
thing1: str
thing2: Optional[int]
class ExampleUser(TypedDict, total=False):
name: str
favorite_number: Optional[int]
favorite_color: Optional[str]
address: ExampleAddressUSRecord
other_thing: ExampleOtherThing
'''
snapshots['SnapshotTypedDictFromSchemaString::test_snapshot_all_schemas nested_records_deep.avsc'] = '''from typing import Optional, TypedDict
class ExampleAvroAddressUSRecord(TypedDict, total=False):
streetaddress: str
city: str
class ExampleAvroNextOtherThing(TypedDict, total=False):
thing1: str
thing2: Optional[int]
class ExampleAvroOtherThing(TypedDict, total=False):
thing1: str
other_thing: ExampleAvroNextOtherThing
class ExampleAvroUser(TypedDict, total=False):
name: str
favorite_number: Optional[int]
favorite_color: Optional[str]
address: ExampleAvroAddressUSRecord
other_thing: ExampleAvroOtherThing
'''
snapshots['SnapshotTypedDictFromSchemaString::test_snapshot_all_schemas no_optional_field_record.avsc'] = '''from typing import TypedDict
class ExampleAvroAnotherExample(TypedDict, total=False):
id: str
'''
snapshots['SnapshotTypedDictMapAndFixedFromSchemaFile::test_array_map_schemas org.apache.avro.ipc.HandshakeRequest.avsc'] = '''from typing import Dict, Optional, TypedDict
class OrgApacheAvroIpcHandshakeRequest(TypedDict, total=False):
clientHash: bytes
clientProtocol: Optional[str]
serverHash: bytes
meta: Optional[Dict[str, bytes]]
'''
snapshots['SnapshotTypedDictMapAndFixedFromSchemaFile::test_array_map_schemas org.apache.avro.ipc.HandshakeResponse.avsc'] = '''from enum import Enum
from typing import Dict, Optional, TypedDict
class OrgApacheAvroIpcHandshakeMatch(Enum):
BOTH = "BOTH"
CLIENT = "CLIENT"
NONE = "NONE"
class OrgApacheAvroIpcHandshakeResponse(TypedDict, total=False):
match: OrgApacheAvroIpcHandshakeMatch
serverProtocol: Optional[str]
serverHash: Optional[bytes]
meta: Optional[Dict[str, bytes]]
'''
| 26.530184
| 171
| 0.767214
| 1,085
| 10,108
| 6.954839
| 0.140092
| 0.070501
| 0.09568
| 0.026504
| 0.862576
| 0.862576
| 0.835807
| 0.796316
| 0.715081
| 0.700239
| 0
| 0.001514
| 0.150673
| 10,108
| 380
| 172
| 26.6
| 0.877461
| 0.006134
| 0
| 0.779026
| 0
| 0
| 0.946829
| 0.317335
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.131086
| 0
| 0.131086
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8bfa673a2e285716860918117463432bb1279bff
| 852
|
py
|
Python
|
src/tests/pantilt.py
|
duckida/legosort
|
8fb6a3810056ced7b09fa7e5db42148b54831bcc
|
[
"MIT"
] | 3
|
2020-04-30T18:56:32.000Z
|
2020-08-03T10:20:36.000Z
|
src/tests/pantilt.py
|
duckida/legosort
|
8fb6a3810056ced7b09fa7e5db42148b54831bcc
|
[
"MIT"
] | 1
|
2020-08-03T23:34:03.000Z
|
2020-08-03T23:34:03.000Z
|
src/tests/pantilt.py
|
duckida/legosort
|
8fb6a3810056ced7b09fa7e5db42148b54831bcc
|
[
"MIT"
] | null | null | null |
from gpiozero import Servo
import time
btm = Servo(17)
top = Servo(26)
#The Drop mechanism
print("Demo of Dropping Bricks")
top.mid()
print("Dropping into Box")
time.sleep(1)
top.min()
print("going back")
time.sleep(1)
#The Turn-To-Box mechanism
print("Demo of Multiple Boxes")
btm.min()
print("Box 1")
time.sleep(1)
btm.mid()
print("Box 2")
time.sleep(1)
btm.max()
print("Box 3")
time.sleep(1)
#The Full demo
print("Full Demo")
btm.min()
print(btm.min())
print("Box 1")
time.sleep(1)
top.mid()
print("Dropping into Box")
time.sleep(1)
top.min()
print("going back")
time.sleep(1)
btm.mid()
print("Box 2")
time.sleep(1)
top.mid()
print("Dropping into Box")
time.sleep(1)
top.min()
print("going back")
time.sleep(1)
btm.max()
print("Box 3")
time.sleep(1)
top.mid()
print("Dropping into Box")
time.sleep(1)
top.min()
print("going back")
time.sleep(1)
| 14.689655
| 32
| 0.690141
| 155
| 852
| 3.793548
| 0.212903
| 0.214286
| 0.238095
| 0.154762
| 0.704082
| 0.704082
| 0.704082
| 0.704082
| 0.653061
| 0.653061
| 0
| 0.031746
| 0.112676
| 852
| 57
| 33
| 14.947368
| 0.746032
| 0.065728
| 0
| 0.84
| 0
| 0
| 0.242119
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.04
| 0
| 0.04
| 0.36
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8bb60eab3be70b83df3800a1a7364e52a15228c8
| 67,750
|
py
|
Python
|
tests/test_checkviewaccess.py
|
django-roles-access/master
|
066d0d6b99b986eacc736e6973b415cbb9172d46
|
[
"MIT"
] | 5
|
2019-03-22T08:08:25.000Z
|
2019-04-11T11:46:52.000Z
|
tests/test_checkviewaccess.py
|
django-roles-access/master
|
066d0d6b99b986eacc736e6973b415cbb9172d46
|
[
"MIT"
] | 5
|
2019-04-03T21:53:52.000Z
|
2019-05-22T22:41:34.000Z
|
tests/test_checkviewaccess.py
|
django-roles-access/master
|
066d0d6b99b986eacc736e6973b415cbb9172d46
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from django.contrib.auth.models import Group
from django.utils import timezone
try:
from django.utils.six import StringIO
except:
from io import StringIO
from django_roles_access.models import ViewAccess
try:
from unittest.mock import Mock, patch, MagicMock, ANY, PropertyMock
except:
from mock import Mock, patch, ANY, PropertyMock
from unittest.case import TestCase as UnitTestCase
from django.core.management import call_command
from django.test import TestCase, modify_settings
from django_roles_access.utils import (NONE_TYPE_DEFAULT, NOT_SECURED_DEFAULT,
APP_NAME_FOR_NONE, DISABLED_DEFAULT)
@patch('django_roles_access.management.commands.checkviewaccess.import_module')
@patch('django_roles_access.management.commands.checkviewaccess.settings')
class UnitTestCheckViewAccessCommon(UnitTestCase):
def setUp(self):
self.root_urlconf = Mock()
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport')
def test_OutputFormater_is_called(
self, mock_otuput_formater, mock_settings, mock_import_module,
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
assert mock_otuput_formater.called
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport')
def test_OutputFormater_is_called_once(
self, mock_otuput_formater, mock_settings, mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
self.assertEqual(mock_otuput_formater.call_count, 1)
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport')
@patch('django_roles_access.management.commands.checkviewaccess.Command'
'.style', create=True, new_callable=PropertyMock)
@patch('django_roles_access.management.commands.checkviewaccess.Command'
'.stdout', create=True, new_callable=PropertyMock)
def test_OutputFormater_is_called_once_with(
self, mock_stdout, mock_style, mock_output_formater, mock_settings,
mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
stdout = mock_stdout.return_value
style = mock_style.return_value
call_command('checkviewaccess')
mock_output_formater.assert_called_once_with(stdout, style)
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport.write_header')
def test_write_header(
self, mock_write_header, mock_settings, mock_import_module,
# mock_output_formater
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
mock_write_header.assert_called_once_with()
def test_import_module_is_called(
self, mock_settings, mock_import_module,
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
assert mock_import_module.called
def test_import_module_is_called_once(
self, mock_settings, mock_import_module,
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
self.assertEqual(mock_import_module.call_count, 1)
def test_import_module_is_called_once_with(
self, mock_settings, mock_import_module,
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
mock_import_module.assert_called_once_with(self.root_urlconf)
@patch('django_roles_access.management.commands.checkviewaccess.walk_site_url')
def test_walk_site_url_is_called(
self, mock_walk_site_url, mock_settings, mock_import_module,
):
mock_import_module.urlpatterns = 'fake-url-pattern'
call_command('checkviewaccess')
assert mock_walk_site_url.called
@patch('django_roles_access.management.commands.checkviewaccess.walk_site_url')
def test_walk_site_url_is_called_once(
self, mock_walk_site_url, mock_settings, mock_import_module,
):
mock_import_module.urlpatterns = 'fake-url-pattern'
call_command('checkviewaccess')
self.assertEqual(mock_walk_site_url.call_count, 1)
@patch('django_roles_access.management.commands.checkviewaccess.walk_site_url')
def test_walk_site_url_is_called_once_with(
self, mock_walk_site_url, mock_settings, mock_import_module,
):
urlpatterns = Mock()
urlpatterns.urlpatterns = 'fake-urlpatterns'
mock_import_module.return_value = urlpatterns
call_command('checkviewaccess')
mock_walk_site_url.assert_called_once_with('fake-urlpatterns')
@patch('django_roles_access.management.commands.checkviewaccess.get_views_by_app')
def test_get_views_by_app_is_called(
self, mock_get_views_by_app, mock_settings, mock_import_module,
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
assert mock_get_views_by_app.called
@patch('django_roles_access.management.commands.checkviewaccess.get_views_by_app')
def test_get_views_by_app_is_called_once(
self, mock_get_views_by_app, mock_settings, mock_import_module,
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
self.assertEqual(mock_get_views_by_app.call_count, 1)
@patch('django_roles_access.management.commands.checkviewaccess.walk_site_url')
@patch('django_roles_access.management.commands.checkviewaccess.get_views_by_app')
def test_get_views_by_app_is_called_once_with(
self, mock_get_views_by_app, mock_walk_site_url, mock_settings,
mock_import_module,
):
mock_settings.ROOT_URLCONF = self.root_urlconf
mock_walk_site_url.return_value = 'fake-result'
call_command('checkviewaccess')
mock_get_views_by_app.assert_called_once_with('fake-result')
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport.write_middleware_status')
def test_write_middleware_status(
self, mock_write_middleware_status, mock_settings,
mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
self.assertEqual(mock_write_middleware_status.call_count, 1)
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport.write_end_of_head')
def test_write_end_of_head(
self, mock_write_end_of_head, mock_settings,
mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
self.assertEqual(mock_write_end_of_head.call_count, 1)
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport.process_application_data')
def test_process_application_data(
self, mock_process_application_data, mock_settings,
mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
assert mock_process_application_data.called
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport.process_view_data')
@patch('django_roles_access.management.commands.checkviewaccess.'
'view_access_analyzer')
@patch('django_roles_access.management.commands.checkviewaccess'
'.get_views_by_app')
def test_process_view_data(
self, mock_get_views_by_app, mock_view_access_analyzer,
mock_process_view_data, mock_settings, mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
mock_get_views_by_app.return_value = {'fake-app':
[('/fake1/', 'fake-callback-1',
'fake-view')]}
mock_view_access_analyzer.return_value = 'fake-report'
call_command('checkviewaccess')
assert mock_process_view_data.called
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport.close_application_data')
def test_close_application_data(
self, mock_close_application_data, mock_settings,
mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
assert mock_close_application_data.called
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport.write_footer')
def test_write_footer(
self, mock_write_footer, mock_settings,
mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
assert mock_write_footer.called
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport.write_footer')
def test_write_footer_once(
self, mock_write_footer, mock_settings,
mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
self.assertEqual(mock_write_footer.call_count, 1)
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport.write_footer')
def test_write_footer_once_without_arguments(
self, mock_write_footer, mock_settings,
mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
mock_write_footer.assert_called_once_with()
@patch('django_roles_access.management.commands.checkviewaccess.import_module')
@patch('django_roles_access.management.commands.checkviewaccess.settings')
class UnitTestCheckViewAccessWithoutArguments(UnitTestCase):
def setUp(self):
self.root_urlconf = Mock()
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport.set_format')
def test_format_attribute_is_not_set(
self, mock_set_format, mock_settings, mock_import_module
):
call_command('checkviewaccess')
assert not mock_set_format.called
def test_write_at_beginning_of_command_execution(
self, mock_settings, mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text = u'Start checking views access.'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
def test_write_when_finish_command_execution(
self, mock_settings, mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text = u'End checking view access.'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
def test_write_at_beginning_of_gathering_information_phase(
self, mock_settings, mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text = u'Start gathering information.'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
def test_write_at_end_of_gathering_information_phase(
self, mock_settings, mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text = u'Finish gathering information.'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
def test_middleware_is_active_and_site_active_is_true(
self, mock_settings, mock_import_module
):
mock_settings.MIDDLEWARE = ['fake-middleware',
'django_roles_access.middleware.RolesMiddleware',
'other-fake-middleware']
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text = u'Django roles access middleware is active: True.'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
def test_middleware_is_not_active_and_site_active_is_false(
self, mock_settings, mock_import_module
):
mock_settings.MIDDLEWARE = ['fake-middleware',
'other-fake-middleware']
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text = u'Django roles access middleware is active: False.'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
@patch('django_roles_access.utils.settings')
def test_write_at_start_of_each_application_analyze(
self, mock_utils_settings, mock_settings, mock_import_module
):
mock_utils_settings.INSTALLED_APPS = ['fake-app-1', 'fake-app-2']
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text1 = u'Analyzing: fake-app-1'
expected_text2 = u'Analyzing: fake-app-2'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text1, out.getvalue())
self.assertIn(expected_text2, out.getvalue())
@patch('django_roles_access.utils.settings')
def test_write_at_end_of_each_application_analyze(
self, mock_utils_settings, mock_settings, mock_import_module
):
mock_utils_settings.INSTALLED_APPS = ['fake-app-1', 'fake-app-2']
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text1 = u'Finish analyzing fake-app-1.'
expected_text2 = u'Finish analyzing fake-app-2.'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text1, out.getvalue())
self.assertIn(expected_text2, out.getvalue())
@patch('django_roles_access.utils.settings')
def test_detect_installed_application_is_not_configured(
self, mock_utils_settings, mock_settings, mock_import_module
):
mock_utils_settings.INSTALLED_APPS = ['fake-app-1']
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text = u'fake-app-1 has no type.'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
@patch('django_roles_access.management.commands.checkviewaccess.get_app_type')
@patch('django_roles_access.utils.settings')
def test_get_app_type_is_called(
self, mock_utils_settings, mock_get_app_type, mock_settings,
mock_import_module
):
mock_utils_settings.INSTALLED_APPS = ['fake-app-1']
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
assert mock_get_app_type.called
@patch('django_roles_access.management.commands.checkviewaccess.get_app_type')
@patch('django_roles_access.utils.settings')
def test_get_app_type_is_called_for_each_installed_app_0(
self, mock_utils_settings, mock_get_app_type, mock_settings,
mock_import_module
):
mock_utils_settings.INSTALLED_APPS = []
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
self.assertEqual(mock_get_app_type.call_count, 0)
@patch('django_roles_access.management.commands.checkviewaccess.get_app_type')
@patch('django_roles_access.utils.settings')
def test_get_app_type_is_called_for_each_installed_app_3(
self, mock_utils_settings, mock_get_app_type, mock_settings,
mock_import_module
):
mock_utils_settings.INSTALLED_APPS = ['fake-app-1', 'fake-app-2', 'bla']
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
self.assertEqual(mock_get_app_type.call_count, 3)
@patch('django_roles_access.utils.settings')
@patch('django_roles_access.tools.settings')
def test_detect_installed_application_is_configured_as_NOT_SECURED(
self, mock_tools_settings, mock_utils_settings, mock_settings,
mock_import_module
):
mock_utils_settings.INSTALLED_APPS = ['fake-app']
mock_tools_settings.NOT_SECURED = ['fake-app']
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text = u'fake-app is NOT_SECURED type.'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
@patch('django_roles_access.utils.settings')
@patch('django_roles_access.tools.settings')
def test_detect_installed_application_is_configured_as_DISABLED(
self, mock_tools_settings, mock_utils_settings, mock_settings,
mock_import_module
):
mock_utils_settings.INSTALLED_APPS = ['fake-app']
mock_tools_settings.DISABLED = ['fake-app']
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text = u'fake-app is DISABLED type.'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
@patch('django_roles_access.utils.settings')
@patch('django_roles_access.tools.settings')
def test_detect_installed_application_is_configured_as_PUBLIC(
self, mock_tools_settings, mock_utils_settings, mock_settings,
mock_import_module
):
mock_utils_settings.INSTALLED_APPS = ['fake-app']
mock_tools_settings.PUBLIC = ['fake-app']
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text = u'fake-app is PUBLIC type.'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
@patch('django_roles_access.utils.settings')
@patch('django_roles_access.tools.settings')
def test_detect_installed_application_is_configured_as_SECURED(
self, mock_tools_settings, mock_utils_settings, mock_settings,
mock_import_module
):
mock_utils_settings.INSTALLED_APPS = ['fake-app']
mock_tools_settings.SECURED = ['fake-app']
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text = u'fake-app is SECURED type.'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
@patch('django_roles_access.management.commands.checkviewaccess.'
'view_access_analyzer')
def test_view_analyzer_is_called_0_times_when_app_have_no_views(
self, mock_view_access_analyzer, mock_settings, mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
call_command('checkviewaccess')
self.assertEqual(mock_view_access_analyzer.call_count, 0)
@patch('django_roles_access.management.commands.checkviewaccess.get_views_by_app')
def test_when_app_have_no_views_it_is_reported(
self, mock_get_views_by_app, mock_settings, mock_import_module
):
mock_get_views_by_app.return_value = {
'fake-app-name': []
}
mock_settings.ROOT_URLCONF = self.root_urlconf
out = StringIO()
expected_text = u'\t\t{} does not have configured views.'.format(
'fake-app-name')
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
@patch('django_roles_access.management.commands.checkviewaccess.get_views_by_app')
@patch('django_roles_access.management.commands.checkviewaccess.'
'view_access_analyzer')
def test_view_analyzer_is_called_3_times_when_app_have_3_views(
self, mock_view_access_analyzer, mock_view_by_app, mock_settings,
mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
mock_view_by_app.return_value = {'fake-app':
[('/fake1/', 'fake-callback-1',
'fake-view-1'),
('/fake2/', 'fake-callback-2',
'fake-view-1'),
('/fake3/', 'fake-callback-3',
'fake-view-1')]}
mock_view_access_analyzer.return_value = u'fake-analysis'
call_command('checkviewaccess')
self.assertEqual(mock_view_access_analyzer.call_count, 3)
@patch('django_roles_access.management.commands.checkviewaccess.get_views_by_app')
@patch('django_roles_access.management.commands.checkviewaccess.'
'view_access_analyzer')
@patch('django_roles_access.management.commands.checkviewaccess.get_app_type')
def test_view_analyzer_is_called_1_times_with_params(
self, mock_get_app_type, mock_view_access_analyzer,
mock_view_by_app, mock_settings, mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
mock_view_by_app.return_value = {'fake-app':
[('/fake1/', 'fake-callback-1',
'fake-view-1')]}
mock_get_app_type.return_value = 'fake-app-type'
mock_view_access_analyzer.return_value = u'fake-analysis'
call_command('checkviewaccess')
mock_view_access_analyzer.assert_called_with('fake-app-type',
'fake-callback-1',
'fake-view-1', False)
@patch('django_roles_access.management.commands.checkviewaccess.get_views_by_app')
@patch('django_roles_access.management.commands.checkviewaccess.'
'view_access_analyzer')
def test_view_name_is_reported(
self, mock_view_access_analyzer, mock_view_by_app, mock_settings,
mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
mock_view_by_app.return_value = {'fake-app':
[('/fake1/', 'fake-callback-1',
'fake-view')]}
out = StringIO()
expected_text = u'Analysis for view: fake-view'
mock_view_access_analyzer.return_value = u'fake-analysis'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
@patch('django_roles_access.management.commands.checkviewaccess.get_views_by_app')
@patch('django_roles_access.management.commands.checkviewaccess.'
'view_access_analyzer')
def test_view_url_is_reported(
self, mock_view_access_analyzer, mock_view_by_app, mock_settings,
mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
mock_view_by_app.return_value = {'fake-app':
[('/fake1/', 'fake-callback-1',
'fake-view')]}
mock_view_access_analyzer.return_value = u'fake-analysis'
out = StringIO()
expected_text = u'View url: /fake1/'
call_command('checkviewaccess', stdout=out)
self.assertIn(expected_text, out.getvalue())
@patch('django_roles_access.management.commands.checkviewaccess.get_views_by_app')
@patch('django_roles_access.management.commands.checkviewaccess.'
'view_access_analyzer')
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport.write_view_access_analyzer')
def test_write_report_view_analyzer_is_called_1_times(
self, mock_write_report, mock_view_access_analyzer,
mock_view_by_app, mock_settings, mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
mock_view_by_app.return_value = {'fake-app':
[('/fake1/', 'fake-callback-1',
'fake-view')]}
mock_view_access_analyzer.return_value = u'fake-analysis'
call_command('checkviewaccess')
self.assertEqual(mock_write_report.call_count, 1)
@patch('django_roles_access.management.commands.checkviewaccess.get_views_by_app')
@patch('django_roles_access.management.commands.checkviewaccess.'
'view_access_analyzer')
@patch('django_roles_access.management.commands.checkviewaccess'
'.OutputReport.write_view_access_analyzer')
def test_write_report_view_analyzer_is_called_1_times_with_param(
self, mock_write_report,
mock_view_access_analyzer, mock_view_by_app, mock_settings,
mock_import_module
):
mock_settings.ROOT_URLCONF = self.root_urlconf
mock_view_by_app.return_value = {'fake-app':
[('/fake1/', 'fake-callback-1',
'fake-view')]}
mock_view_access_analyzer.return_value = u'fake-analysis'
call_command('checkviewaccess')
mock_write_report.assert_called_once_with(u'fake-analysis')
@patch('django_roles_access.management.commands.checkviewaccess.import_module')
@patch('django_roles_access.management.commands.checkviewaccess.settings')
class UnitTestCheckViewAccessCSVOutput(UnitTestCase):
    """Unit tests for ``checkviewaccess`` with ``--output-format csv``.

    The ``mock_settings`` and ``mock_import_module`` parameters received by
    every test are injected by class-level ``@patch`` decorators declared
    above this class (outside this block).

    Expected CSV layout, by line index of the command's stdout:
    line 0 report date, line 1 middleware status, line 2 column headers,
    lines 3+ one row per analyzed view.
    """

    def setUp(self):
        # Fake ROOT_URLCONF so the command does not walk a real URL conf.
        self.root_urlconf = Mock()

    def test_action_accept_output_argument(
            self, mock_settings, mock_import_module
    ):
        """The command accepts the '--output-format csv' argument."""
        call_command('checkviewaccess', '--output-format', 'csv')

    @patch('django_roles_access.management.commands.checkviewaccess'
           '.OutputReport.set_format')
    def test_format_attribute_is_set_to_csv(
            self, mock_set_format, mock_settings, mock_import_module
    ):
        """OutputReport.set_format is called exactly once with 'csv'."""
        call_command('checkviewaccess', '--output-format', 'csv')
        mock_set_format.assert_called_once_with('csv')

    @patch('django_roles_access.utils.timezone')
    def test_first_line_output_is_report_date(
            self, mock_timezone, mock_settings, mock_import_module
    ):
        """The first output line carries the report date."""
        mock_settings.ROOT_URLCONF = self.root_urlconf
        out = StringIO()
        mock_timezone.now.return_value = 'fake-date'
        expected = u'Reported: fake-date'
        call_command('checkviewaccess', '--output-format', 'csv',
                     stdout=out)
        result = out.getvalue()
        first_line = result.split('\n')[0]
        self.assertIn(expected, first_line)

    def test_report_if_django_roles_access_middleware_is_active(
            self, mock_settings, mock_import_module
    ):
        """Second line reports the middleware as active when installed."""
        mock_settings.MIDDLEWARE = [
            'fake-middleware',
            'django_roles_access.middleware.RolesMiddleware',
            'other-fake-middleware']
        mock_settings.ROOT_URLCONF = self.root_urlconf
        out = StringIO()
        expected = u'Django roles access middleware is active: True'
        call_command('checkviewaccess', '--output-format', 'csv',
                     stdout=out)
        result = out.getvalue()
        second_line = result.split('\n')[1]
        self.assertIn(expected, second_line)

    def test_report_if_django_roles_access_middleware_is_not_active(
            self, mock_settings, mock_import_module
    ):
        """Second line reports the middleware as inactive when missing."""
        mock_settings.MIDDLEWARE = [
            'fake-middleware',
            'other-fake-middleware']
        mock_settings.ROOT_URLCONF = self.root_urlconf
        out = StringIO()
        expected = u'Django roles access middleware is active: False'
        call_command('checkviewaccess', '--output-format', 'csv',
                     stdout=out)
        result = out.getvalue()
        second_line = result.split('\n')[1]
        self.assertIn(expected, second_line)

    def test_write_csv_columns_name(
            self, mock_settings, mock_import_module
    ):
        """Third line holds the CSV column headers."""
        mock_settings.ROOT_URLCONF = self.root_urlconf
        out = StringIO()
        expected = u'App Name,Type,View Name,Url,Status,Status description'
        call_command('checkviewaccess', '--output-format', 'csv',
                     stdout=out)
        result = out.getvalue()
        header_line = result.split('\n')[2]
        self.assertIn(expected, header_line)

    @patch('django_roles_access.management.commands.checkviewaccess'
           '.get_views_by_app')
    @patch('django_roles_access.management.commands.checkviewaccess.'
           'view_access_analyzer')
    @patch('django_roles_access.management.commands.checkviewaccess'
           '.get_app_type')
    def test_write_rows(
            self, mock_get_app_type, mock_view_access_analyzer,
            mock_get_views_by_app, mock_settings, mock_import_module
    ):
        """
        Normal case: There is app name, it has a type, there is also a view
        name.
        """
        def view_analyze(app_type, callback, view_name, site_active):
            # Map each fake view to a distinct analysis result so the
            # Normal / Error / Warning status column can be asserted.
            if view_name is None:
                return None
            if view_name == 'fake-view-1':
                return '1-analyze'
            if view_name == 'fake-view-2':
                return 'ERROR: 2-analyze'
            if view_name == 'fake-view-3':
                return 'WARNING: 3-analyze'
        mock_settings.ROOT_URLCONF = self.root_urlconf
        mock_get_views_by_app.return_value = {
            'fake-app': [('/fake1/', 'fake-callback-1', 'fake-view-1'),
                         ('/fake2/', 'fake-callback-2', 'fake-view-2'),
                         ('/fake3/', 'fake-callback-3', 'fake-view-3')]
        }
        mock_view_access_analyzer.side_effect = view_analyze
        mock_get_app_type.return_value = 'fake-type'
        out = StringIO()
        call_command('checkviewaccess', '--output-format', 'csv',
                     stdout=out)
        result = out.getvalue()
        expected1 = 'fake-app,fake-type,fake-view-1,/fake1/,Normal,1-analyze'
        expected2 = 'fake-app,fake-type,fake-view-2,/fake2/,Error,2-analyze'
        expected3 = 'fake-app,fake-type,fake-view-3,/fake3/,Warning,3-analyze'
        rows = result.split('\n')
        self.assertEqual(rows[3], expected1)
        self.assertEqual(rows[4], expected2)
        self.assertEqual(rows[5], expected3)

    @patch('django_roles_access.management.commands.checkviewaccess'
           '.get_views_by_app')
    @patch('django_roles_access.management.commands.checkviewaccess.'
           'view_access_analyzer')
    @patch('django_roles_access.management.commands.checkviewaccess'
           '.get_app_type')
    def test_no_write_csv_ending_data(
            self, mock_get_app_type, mock_view_access_analyzer,
            mock_get_views_by_app, mock_settings, mock_import_module
    ):
        """CSV output has no footer: the line after the data rows is empty."""
        mock_settings.ROOT_URLCONF = self.root_urlconf
        mock_get_views_by_app.return_value = {
            'fake-app': [('/fake1/', 'fake-callback-1', 'fake-view-1'),
                         ('/fake2/', 'fake-callback-2', 'fake-view-2'),
                         ('/fake3/', 'fake-callback-3', 'fake-view-3')]
        }
        mock_view_access_analyzer.return_value = 'fake-report'
        mock_get_app_type.return_value = 'fake-type'
        out = StringIO()
        call_command('checkviewaccess', '--output-format', 'csv',
                     stdout=out)
        result = out.getvalue()
        expected1 = 'fake-app,fake-type,fake-view-1,/fake1/,Normal,fake-report'
        expected2 = 'fake-app,fake-type,fake-view-2,/fake2/,Normal,fake-report'
        expected3 = 'fake-app,fake-type,fake-view-3,/fake3/,Normal,fake-report'
        rows = result.split('\n')
        self.assertEqual(rows[3], expected1)
        self.assertEqual(rows[4], expected2)
        self.assertEqual(rows[5], expected3)
        # Nothing follows the last data row.
        self.assertEqual(u'', rows[6])
class IntegratedTestCheckViewAccessWithoutArgument(TestCase):
    """
    Cases:
    * Test default cases: no View objects for the view but Django role tool is
      used and application has type: NOT_SECURED, PUBLIC, AUTHORIZED, By role.
    * Test no application type: no View objects for the view but Django role
      tool is used and application has no type.
    * Test no configuration: no View objects for the view, no Django role
      tool is used. and application has type and application has no type.
    """
    NO_APP_TYPE_ERROR = u'ERROR: Django roles middleware is active; or view ' \
                        u'is protected with Django roles decorator or mixin, ' \
                        u'and has no application or application has no type. ' \
                        u'There are no View Access object for the view. Is ' \
                        u'not possible to determine behavior for access view.'
    NOT_SECURED_DEFAULT = u'WARNING: View has no security configured ' \
                          u'(ViewAccess) and application type is ' \
                          u'"NOT_SECURED". No access is checked at all.'

    def setUp(self):
        # Remove any app-type settings a previous test may have left behind.
        # 'DISABLED' is included because tests below set it and would
        # otherwise leak into later tests if such a test errors out.
        for attr in ('NOT_SECURED', 'PUBLIC', 'SECURED', 'DISABLED'):
            try:
                delattr(settings, attr)
            except AttributeError:
                pass

    def test_no_django_roles_tools_used(self):
        """Untyped app + unprotected view: 'no tool used' is reported."""
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access has no type.'
        expected_2 = u'Analysis for view: app-ns2:middleware_view_func\n'
        expected_2 += u'\t\tView url: role-included2/middleware_view_func/'
        expected_2 += u'\n\t\tNo Django roles access tool used. Access to '
        expected_2 += u'view depends on its implementation.'
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_no_django_roles_used_no_view_access_object_no_application(
            self
    ):
        """View outside any application: 'no tool used' is reported."""
        expected_1 = u'\n\tAnalyzing: {}'.format(APP_NAME_FOR_NONE)
        expected_2 = u'Analysis for view: direct_view\n'
        expected_2 += u'\t\tView url: direct_view/'
        expected_2 += u'\n\t\tNo Django roles access tool used. Access to '
        expected_2 += u'view depends on its implementation.'
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_no_django_roles_used_no_view_access_object_no_application_type(
            self
    ):
        """View without app type: app reported as having no type."""
        expected_1 = u'\n\tAnalyzing: {}'.format(APP_NAME_FOR_NONE)
        expected_1 += u'\n\t\t{} has no type.'.format(APP_NAME_FOR_NONE)
        expected_2 = u'Analysis for view: direct_view\n'
        expected_2 += u'\t\tView url: direct_view/'
        expected_2 += u'\n\t\tNo Django roles access tool used. Access to '
        expected_2 += u'view depends on its implementation.'
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_no_django_roles_used_no_view_access_object_app_type_SECURED(self):
        """SECURED app, unprotected view: 'no tool used' is reported."""
        settings.__setattr__('SECURED', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is SECURED type.'
        expected_2 = u'Analysis for view: app-ns2:middleware_view_func\n'
        expected_2 += u'\t\tView url: role-included2/middleware_view_func/'
        expected_2 += u'\n\t\tNo Django roles access tool used. Access to '
        expected_2 += u'view depends on its implementation.'
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('SECURED')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_decorator_no_view_access_object_app_type_None(self):
        """Decorated view, no ViewAccess, no app type: default error."""
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access has no type.'
        expected_2 = u'Analysis for view: app-ns2:view_protected_by_role\n'
        expected_2 += u'\t\tView url: role-included2/view_by_role/'
        expected_2 += u'\n\t\t\t' + NONE_TYPE_DEFAULT
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_decorator_no_view_access_object_app_type_NOT_SECURED(self):
        """Decorated view in NOT_SECURED app: default warning."""
        settings.__setattr__('NOT_SECURED', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is NOT_SECURED type.'
        expected_2 = u'Analysis for view: app-ns2:view_protected_by_role\n'
        expected_2 += u'\t\tView url: role-included2/view_by_role/'
        expected_2 += u'\n\t\t\t' + NOT_SECURED_DEFAULT
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('NOT_SECURED')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_decorator_no_view_access_object_app_type_DISABLED(self):
        """Decorated view in DISABLED app: default disabled message."""
        settings.__setattr__('DISABLED', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is DISABLED type.'
        expected_2 = u'Analysis for view: app-ns2:view_protected_by_role\n'
        expected_2 += u'\t\tView url: role-included2/view_by_role/'
        expected_2 += u'\n\t\t\t' + DISABLED_DEFAULT
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('DISABLED')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_decorator_no_view_access_object_app_type_SECURED(self):
        """Decorated view in SECURED app: analysis is reported."""
        settings.__setattr__('SECURED', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is SECURED type.'
        expected_2 = u'Analysis for view: app-ns2:view_protected_by_role\n'
        expected_2 += u'\t\tView url: role-included2/view_by_role/'
        expected_2 += u'\n\t\t'
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('SECURED')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_decorator_no_view_access_object_app_type_PUBLIC(self):
        """Decorated view in PUBLIC app: analysis is reported."""
        settings.__setattr__('PUBLIC', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is PUBLIC type.'
        expected_2 = u'Analysis for view: app-ns2:view_protected_by_role\n'
        expected_2 += u'\t\tView url: role-included2/view_by_role/'
        expected_2 += u'\n\t\t'
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('PUBLIC')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_decorator_view_access_object_app_type_None(self):
        """Decorated view with a Public ViewAccess: type is reported."""
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access has no type.'
        expected_2 = u'Analysis for view: app-ns2:view_protected_by_role\n'
        expected_2 += u'\t\tView url: role-included2/view_by_role/'
        expected_2 += u'\n\t\tView access is of type Public.'
        ViewAccess.objects.create(view='app-ns2:view_protected_by_role',
                                  type='pu')
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_decorator_view_access_object_app_type_SECURED(self):
        """Decorated view, SECURED app, Public ViewAccess."""
        settings.__setattr__('SECURED', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is SECURED type.'
        expected_2 = u'Analysis for view: app-ns2:view_protected_by_role\n'
        expected_2 += u'\t\tView url: role-included2/view_by_role/'
        expected_2 += u'\n\t\tView access is of type Public.'
        ViewAccess.objects.create(view='app-ns2:view_protected_by_role',
                                  type='pu')
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('SECURED')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_mixin_no_view_access_object_app_type_None(self):
        """Mixin view, no ViewAccess, no app type: default error."""
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access has no type.'
        expected_2 = u'Analysis for view: app-ns2:mixin_class_view\n'
        expected_2 += u'\t\tView url: role-included2/mixin_class_view/'
        expected_2 += u'\n\t\t\t' + NONE_TYPE_DEFAULT
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_mixin_no_view_access_object_app_type_NOT_SECURED(self):
        """Mixin view in NOT_SECURED app: default warning."""
        settings.__setattr__('NOT_SECURED', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is NOT_SECURED type.'
        expected_2 = u'Analysis for view: app-ns2:mixin_class_view\n'
        expected_2 += u'\t\tView url: role-included2/mixin_class_view/'
        expected_2 += u'\n\t\t\t' + NOT_SECURED_DEFAULT
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('NOT_SECURED')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_mixin_no_view_access_object_app_type_SECURED(self):
        """Mixin view in SECURED app: analysis is reported."""
        settings.__setattr__('SECURED', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is SECURED type.'
        expected_2 = u'Analysis for view: app-ns2:mixin_class_view\n'
        expected_2 += u'\t\tView url: role-included2/mixin_class_view/'
        expected_2 += u'\n\t\t'
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('SECURED')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_mixin_no_view_access_object_app_type_PUBLIC(self):
        """Mixin view in PUBLIC app: analysis is reported."""
        settings.__setattr__('PUBLIC', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is PUBLIC type.'
        expected_2 = u'Analysis for view: app-ns2:mixin_class_view\n'
        expected_2 += u'\t\tView url: role-included2/mixin_class_view/'
        expected_2 += u'\n\t\t'
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('PUBLIC')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_mixin_view_access_object_app_type_None(self):
        """Mixin view with a Public ViewAccess: type is reported."""
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access has no type.'
        expected_2 = u'Analysis for view: app-ns2:mixin_class_view\n'
        expected_2 += u'\t\tView url: role-included2/mixin_class_view/'
        expected_2 += u'\n\t\tView access is of type Public.'
        ViewAccess.objects.create(view='app-ns2:mixin_class_view',
                                  type='pu')
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    def test_mixin_view_access_object_app_type_SECURED(self):
        """Mixin case, SECURED app, Public ViewAccess."""
        settings.__setattr__('SECURED', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is SECURED type.'
        expected_2 = u'Analysis for view: app-ns2:view_protected_by_role\n'
        expected_2 += u'\t\tView url: role-included2/view_by_role/'
        expected_2 += u'\n\t\tView access is of type Public.'
        ViewAccess.objects.create(view='app-ns2:view_protected_by_role',
                                  type='pu')
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('SECURED')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    @modify_settings(MIDDLEWARE={
        'append': 'django_roles_access.middleware.RolesMiddleware'
    })
    def test_site_active_no_view_access_object_app_type_None(self):
        """Middleware active, no ViewAccess, no app type: default error."""
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access has no type.'
        expected_2 = u'Analysis for view: app-ns2:middleware_view_func\n'
        expected_2 += u'\t\tView url: role-included2/middleware_view_func/'
        expected_2 += u'\n\t\t\t' + NONE_TYPE_DEFAULT
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    @modify_settings(MIDDLEWARE={
        'append': 'django_roles_access.middleware.RolesMiddleware'
    })
    def test_site_active_no_view_access_object_app_type_NOT_SECURED(self):
        """Middleware active, NOT_SECURED app: default warning."""
        settings.__setattr__('NOT_SECURED', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is NOT_SECURED type.'
        expected_2 = u'Analysis for view: app-ns2:middleware_view_func\n'
        expected_2 += u'\t\tView url: role-included2/middleware_view_func/'
        expected_2 += u'\n\t\t\t' + NOT_SECURED_DEFAULT
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('NOT_SECURED')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    @modify_settings(MIDDLEWARE={
        'append': 'django_roles_access.middleware.RolesMiddleware'
    })
    def test_site_active_no_view_access_object_app_type_SECURED(self):
        """Middleware active, SECURED app: analysis is reported."""
        settings.__setattr__('SECURED', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is SECURED type.'
        expected_2 = u'Analysis for view: app-ns2:middleware_view_func\n'
        expected_2 += u'\t\tView url: role-included2/middleware_view_func/'
        expected_2 += u'\n\t\t'
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('SECURED')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    @modify_settings(MIDDLEWARE={
        'append': 'django_roles_access.middleware.RolesMiddleware'
    })
    def test_site_active_no_view_access_object_app_type_PUBLIC(self):
        """Middleware active, PUBLIC app: analysis is reported."""
        settings.__setattr__('PUBLIC', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is PUBLIC type.'
        expected_2 = u'Analysis for view: app-ns2:middleware_view_func\n'
        expected_2 += u'\t\tView url: role-included2/middleware_view_func/'
        expected_2 += u'\n\t\t'
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('PUBLIC')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    @modify_settings(MIDDLEWARE={
        'append': 'django_roles_access.middleware.RolesMiddleware'
    })
    def test_site_active_view_access_object_app_type_None(self):
        """Middleware active, Public ViewAccess: type is reported."""
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access has no type.'
        expected_2 = u'Analysis for view: app-ns2:middleware_view_func\n'
        expected_2 += u'\t\tView url: role-included2/middleware_view_func/'
        expected_2 += u'\n\t\tView access is of type Public.'
        ViewAccess.objects.create(view='app-ns2:middleware_view_func',
                                  type='pu')
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    @modify_settings(MIDDLEWARE={
        'append': 'django_roles_access.middleware.RolesMiddleware'
    })
    def test_site_active_view_access_object_app_type_NOT_SECURED(self):
        """NOT_SECURED overrides ViewAccess: warning is reported."""
        settings.__setattr__('NOT_SECURED', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is NOT_SECURED type.'
        expected_2 = u'Analysis for view: app-ns2:middleware_view_func\n'
        expected_2 += u'\t\tView url: role-included2/middleware_view_func/'
        expected_2 += u'\n\t\t\tWARNING: View belongs to an application of type'
        expected_2 += u' "NOT_SECURED". No access is checked at all.'
        ViewAccess.objects.create(view='app-ns2:middleware_view_func',
                                  type='pu')
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('NOT_SECURED')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    @modify_settings(MIDDLEWARE={
        'append': 'django_roles_access.middleware.RolesMiddleware'
    })
    def test_site_active_view_access_object_app_type_SECURED(self):
        """By-role ViewAccess: access type and role list are reported."""
        settings.__setattr__('SECURED', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is SECURED type.'
        expected_2 = u'Analysis for view: app-ns2:middleware_view_func\n'
        expected_2 += u'\t\tView url: role-included2/middleware_view_func/'
        expected_2 += u'\n\t\tView access is of type By role.'
        expected_2 += u'Roles with access: role-1, role-2'
        role_1, created = Group.objects.get_or_create(name='role-1')
        role_2, created = Group.objects.get_or_create(name='role-2')
        view_access = ViewAccess.objects.create(
            view='app-ns2:middleware_view_func',
            type='br')
        view_access.roles.add(role_1)
        view_access.roles.add(role_2)
        view_access.save()
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('SECURED')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())

    @modify_settings(MIDDLEWARE={
        'append': 'django_roles_access.middleware.RolesMiddleware'
    })
    def test_site_active_view_access_object_app_type_PUBLIC(self):
        """Authorized ViewAccess in PUBLIC app: type is reported."""
        settings.__setattr__('PUBLIC', ['django_roles_access'])
        expected_1 = u'\n\tAnalyzing: django_roles_access'
        expected_1 += u'\n\t\tdjango_roles_access is PUBLIC type.'
        expected_2 = u'Analysis for view: app-ns2:middleware_view_func\n'
        expected_2 += u'\t\tView url: role-included2/middleware_view_func/'
        expected_2 += u'\n\t\tView access is of type Authorized.'
        ViewAccess.objects.create(view='app-ns2:middleware_view_func',
                                  type='au')
        out = StringIO()
        call_command('checkviewaccess', stdout=out)
        settings.__delattr__('PUBLIC')
        self.assertIn(expected_1, out.getvalue())
        self.assertIn(expected_2, out.getvalue())
class IntegratedTestCheckViewAccessOutputCSVFormat(TestCase):
"""
Cases:
* Test default cases: no View objects for the view but Django role tool is
used and application has type: NOT_SECURED, PUBLIC, AUTHORIZED, By role.
* Test no application type: no View objects for the view but Django role
tool is used and application has no type.
* Test no configuration: no View objects for the view, no Django role
tool is used. and application has type and application has no type.
"""
# RED = '\x1b[31;1m'
# GREEN = '\x1b[32;1m'
# WARNING = '\x1b[33;1m'
# ATTRIBUTES_OFF = '\x1b[0m'
NO_APP_TYPE_ERROR = u'ERROR: Django roles middleware is active; or view ' \
u'is protected with Django roles decorator or mixin, ' \
u'and has no application or application has no type. ' \
u'There are no View Access object for the view. Is ' \
u'not possible to determine behavior for access view.'
NOT_SECURED_DEFAULT = u'WARNING: View has no security configured ' \
u'(ViewAccess) and application type is ' \
u'"NOT_SECURED". No access is checked at all.'
def setUp(self):
# Clean up
try:
settings.__delattr__('NOT_SECURED')
except:
pass
try:
settings.__delattr__('PUBLIC')
except:
pass
try:
settings.__delattr__('SECURED')
except:
pass
# self.header = u'Reported: {}\n'.format(timezone.now())
def test_no_django_roles_tools_used(self):
expected = u'django_roles_access,no type,app-ns2:middleware_view_func,'
expected += u'role-included2/middleware_view_func/,Normal,'
expected += u'No Django roles access tool used. Access to view depends '
expected += u'on its implementation.'
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
self.assertIn(expected, out.getvalue())
def test_no_django_roles_used_no_view_access_object_no_application(
self
):
expected = u'{},'.format(APP_NAME_FOR_NONE)
expected += u'no type,direct_view,direct_view/,Normal,'
expected += u'No Django roles access tool used. Access to '
expected += u'view depends on its implementation.'
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
self.assertIn(expected, out.getvalue())
def test_no_django_roles_used_no_view_access_object_app_type_SECURED(self):
settings.__setattr__('SECURED', ['django_roles_access'])
expected = u'django_roles_access,SECURED,app-ns2:middleware_view_func,'
expected += u'role-included2/middleware_view_func/,Normal,'
expected += u'No Django roles access tool used. Access to '
expected += u'view depends on its implementation.'
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('SECURED')
self.assertIn(expected, out.getvalue())
def test_decorator_no_view_access_object_app_type_None(self):
expected = u'django_roles_access,no type,app-ns2:view_protected_by_role,'
expected += u'role-included2/view_by_role/,Error'
expected += u',' + NONE_TYPE_DEFAULT.split('ERROR: ')[1]
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
self.assertIn(expected, out.getvalue())
def test_decorator_no_view_access_object_app_type_NOT_SECURED(self):
settings.__setattr__('NOT_SECURED', ['django_roles_access'])
expected = u'django_roles_access,NOT_SECURED,app-ns2:view_protected_by_role,'
expected += u'role-included2/view_by_role/,Warning'
expected += u',' + NOT_SECURED_DEFAULT.split('WARNING: ')[1]
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('NOT_SECURED')
self.assertIn(expected, out.getvalue())
def test_decorator_no_view_access_object_app_type_SECURED(self):
settings.__setattr__('SECURED', ['django_roles_access'])
expected = u'django_roles_access,SECURED,app-ns2:view_protected_by_role,'
expected += u'role-included2/view_by_role/,Normal,'
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('SECURED')
self.assertIn(expected, out.getvalue())
def test_decorator_no_view_access_object_app_type_PUBLIC(self):
settings.__setattr__('PUBLIC', ['django_roles_access'])
expected = u'django_roles_access,PUBLIC,app-ns2:view_protected_by_role,'
expected += u'role-included2/view_by_role/,Normal,'
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('PUBLIC')
self.assertIn(expected, out.getvalue())
def test_decorator_view_access_object_app_type_None(self):
expected = u'django_roles_access,no type,app-ns2:view_protected_by_role,'
expected += u'role-included2/view_by_role/,Normal,'
expected += u'View access is of type Public.'
ViewAccess.objects.create(view='app-ns2:view_protected_by_role',
type='pu')
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
self.assertIn(expected, out.getvalue())
def test_decorator_view_access_object_app_type_SECURED(self):
settings.__setattr__('SECURED', ['django_roles_access'])
expected = u'django_roles_access,SECURED,app-ns2:view_protected_by_role,'
expected += u'role-included2/view_by_role/,Normal,'
expected += u'View access is of type Public.'
ViewAccess.objects.create(view='app-ns2:view_protected_by_role',
type='pu')
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('SECURED')
self.assertIn(expected, out.getvalue())
def test_mixin_no_view_access_object_app_type_None(self):
expected = u'django_roles_access,no type,app-ns2:mixin_class_view,'
expected += u'role-included2/mixin_class_view/,Error'
expected += u',' + NONE_TYPE_DEFAULT.split('ERROR: ')[1]
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
self.assertIn(expected, out.getvalue())
def test_mixin_no_view_access_object_app_type_NOT_SECURED(self):
settings.__setattr__('NOT_SECURED', ['django_roles_access'])
expected = u'django_roles_access,NOT_SECURED,app-ns2:mixin_class_view,'
expected += u'role-included2/mixin_class_view/,Warning'
expected += u',' + NOT_SECURED_DEFAULT.split('WARNING: ')[1]
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('NOT_SECURED')
self.assertIn(expected, out.getvalue())
def test_mixin_no_view_access_object_app_type_SECURED(self):
settings.__setattr__('SECURED', ['django_roles_access'])
expected = u'django_roles_access,SECURED,app-ns2:mixin_class_view,'
expected += u'role-included2/mixin_class_view/,Normal,'
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('SECURED')
self.assertIn(expected, out.getvalue())
def test_mixin_no_view_access_object_app_type_PUBLIC(self):
settings.__setattr__('PUBLIC', ['django_roles_access'])
expected = u'django_roles_access,PUBLIC,app-ns2:mixin_class_view,'
expected += u'role-included2/mixin_class_view/,Normal,'
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('PUBLIC')
self.assertIn(expected, out.getvalue())
def test_mixin_view_access_object_app_type_None(self):
expected = u'django_roles_access,no type,app-ns2:mixin_class_view,'
expected += u'role-included2/mixin_class_view/,Normal,'
expected += u'View access is of type Public.'
ViewAccess.objects.create(view='app-ns2:mixin_class_view',
type='pu')
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
self.assertIn(expected, out.getvalue())
def test_mixin_view_access_object_app_type_SECURED(self):
settings.__setattr__('SECURED', ['django_roles_access'])
expected = u'django_roles_access,SECURED,app-ns2:view_protected_by_role,'
expected += u'role-included2/view_by_role/,Normal,'
expected += u'View access is of type Public.'
ViewAccess.objects.create(view='app-ns2:view_protected_by_role',
type='pu')
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('SECURED')
self.assertIn(expected, out.getvalue())
@modify_settings(MIDDLEWARE={
'append': 'django_roles_access.middleware.RolesMiddleware'
})
def test_site_active_no_view_access_object_app_type_None(self):
expected = u'django_roles_access,no type,app-ns2:middleware_view_func,'
expected += u'role-included2/middleware_view_func/,Error'
expected += u',' + NONE_TYPE_DEFAULT.split('ERROR: ')[1]
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
self.assertIn(expected, out.getvalue())
@modify_settings(MIDDLEWARE={
'append': 'django_roles_access.middleware.RolesMiddleware'
})
def test_site_active_no_view_access_object_app_type_NOT_SECURED(self):
settings.__setattr__('NOT_SECURED', ['django_roles_access'])
expected = u'django_roles_access,NOT_SECURED,app-ns2:middleware_view_func,'
expected += u'role-included2/middleware_view_func/,Warning'
expected += u',' + NOT_SECURED_DEFAULT.split('WARNING: ')[1]
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('NOT_SECURED')
self.assertIn(expected, out.getvalue())
@modify_settings(MIDDLEWARE={
'append': 'django_roles_access.middleware.RolesMiddleware'
})
def test_site_active_no_view_access_object_app_type_SECURED(self):
settings.__setattr__('SECURED', ['django_roles_access'])
expected = u'django_roles_access,SECURED,app-ns2:middleware_view_func,'
expected += u'role-included2/middleware_view_func/,Normal,'
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('SECURED')
self.assertIn(expected, out.getvalue())
@modify_settings(MIDDLEWARE={
'append': 'django_roles_access.middleware.RolesMiddleware'
})
def test_site_active_no_view_access_object_app_type_PUBLIC(self):
settings.__setattr__('PUBLIC', ['django_roles_access'])
expected = u'django_roles_access,PUBLIC,app-ns2:middleware_view_func,'
expected += u'role-included2/middleware_view_func/,Normal,'
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('PUBLIC')
self.assertIn(expected, out.getvalue())
@modify_settings(MIDDLEWARE={
'append': 'django_roles_access.middleware.RolesMiddleware'
})
def test_site_active_view_access_object_app_type_None(self):
expected = u'django_roles_access,no type,app-ns2:middleware_view_func,'
expected += u'role-included2/middleware_view_func/,Normal,'
expected += u'View access is of type Public.'
ViewAccess.objects.create(view='app-ns2:middleware_view_func',
type='pu')
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
self.assertIn(expected, out.getvalue())
@modify_settings(MIDDLEWARE={
'append': 'django_roles_access.middleware.RolesMiddleware'
})
def test_site_active_view_access_object_app_type_NOT_SECURED(self):
settings.__setattr__('NOT_SECURED', ['django_roles_access'])
expected = u'django_roles_access,NOT_SECURED,'
expected += u'app-ns2:middleware_view_func,'
expected += u'role-included2/middleware_view_func/,Warning,'
expected += u'View belongs to an application of type "NOT_SECURED". '
expected += u'No access is checked at all.'
ViewAccess.objects.create(view='app-ns2:middleware_view_func',
type='pu')
out = StringIO()
call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
settings.__delattr__('NOT_SECURED')
self.assertIn(expected, out.getvalue())
@modify_settings(MIDDLEWARE={
    'append': 'django_roles_access.middleware.RolesMiddleware'
})
def test_site_active_view_access_object_app_type_SECURED(self):
    """CSV row for a by-role ViewAccess in a SECURED app lists the roles.

    Fixes: use plain ``setattr``/``delattr`` instead of the dunder
    spellings, drop the unused ``created`` locals, and wrap the body in
    ``try/finally`` so the ``SECURED`` setting cannot leak on failure.
    """
    setattr(settings, 'SECURED', ['django_roles_access'])
    try:
        expected = u'django_roles_access,SECURED,app-ns2:middleware_view_func,'
        expected += u'role-included2/middleware_view_func/,Normal,'
        expected += u'View access is of type By role.'
        expected += u'Roles with access: role-1, role-2'
        role_1, _ = Group.objects.get_or_create(name='role-1')
        role_2, _ = Group.objects.get_or_create(name='role-2')
        view_access = ViewAccess.objects.create(
            view='app-ns2:middleware_view_func',
            type='br')
        view_access.roles.add(role_1)
        view_access.roles.add(role_2)
        view_access.save()
        out = StringIO()
        call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
    finally:
        # Always remove the temporary setting, even on failure.
        delattr(settings, 'SECURED')
    self.assertIn(expected, out.getvalue())
@modify_settings(MIDDLEWARE={
    'append': 'django_roles_access.middleware.RolesMiddleware'
})
def test_site_active_view_access_object_app_type_PUBLIC(self):
    """CSV row for an authorized-only ViewAccess in a PUBLIC app.

    Fixes: use plain ``setattr``/``delattr`` instead of the dunder
    spellings, and wrap the body in ``try/finally`` so the ``PUBLIC``
    setting cannot leak into other tests when the command raises.
    """
    setattr(settings, 'PUBLIC', ['django_roles_access'])
    try:
        expected = u'django_roles_access,PUBLIC,app-ns2:middleware_view_func,'
        expected += u'role-included2/middleware_view_func/,Normal,'
        expected += u'View access is of type Authorized.'
        ViewAccess.objects.create(view='app-ns2:middleware_view_func',
                                  type='au')
        out = StringIO()
        call_command('checkviewaccess', '--output-format', 'csv', stdout=out)
    finally:
        # Always remove the temporary setting, even on failure.
        delattr(settings, 'PUBLIC')
    self.assertIn(expected, out.getvalue())
| 46.821009
| 86
| 0.666952
| 8,145
| 67,750
| 5.184408
| 0.030694
| 0.053141
| 0.072868
| 0.037512
| 0.931774
| 0.919814
| 0.911147
| 0.901769
| 0.896654
| 0.886518
| 0
| 0.008891
| 0.229697
| 67,750
| 1,446
| 87
| 46.853389
| 0.800241
| 0.01907
| 0
| 0.81904
| 0
| 0.004721
| 0.303363
| 0.155684
| 0
| 0
| 0
| 0
| 0.104642
| 1
| 0.084972
| false
| 0.004721
| 0.061369
| 0
| 0.15657
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8bcbca62c26abccf44753f68aa67bbcf5978f831
| 48
|
py
|
Python
|
tests/test_import.py
|
pastas/metran
|
ac3c74ceedb75447243e369b3ba771e48099f00e
|
[
"MIT"
] | 6
|
2021-06-01T14:56:11.000Z
|
2022-02-17T15:50:34.000Z
|
tests/test_import.py
|
pastas/metran
|
ac3c74ceedb75447243e369b3ba771e48099f00e
|
[
"MIT"
] | 2
|
2021-06-02T07:31:29.000Z
|
2022-03-30T11:07:01.000Z
|
tests/test_import.py
|
pastas/metran
|
ac3c74ceedb75447243e369b3ba771e48099f00e
|
[
"MIT"
] | 2
|
2021-07-08T08:56:10.000Z
|
2022-03-30T08:32:43.000Z
|
def test_import():
    """Smoke test: the metran package can be imported without errors."""
    # The import itself is the assertion; a broken install raises ImportError.
    import metran  # noqa: F401
| 12
| 18
| 0.666667
| 6
| 48
| 5.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.270833
| 48
| 3
| 19
| 16
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.666667
| 0
| 1.333333
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8be29c43623809f1e9b58ef18b11f5101f1d2299
| 11,801
|
py
|
Python
|
tests/tests_response_handlers/tests_hitbtc/test_trading.py
|
astsu-dev/exapi
|
1ef39ccdd77e9ddb60ec6eaa16a2cc26e1ac3e12
|
[
"MIT"
] | null | null | null |
tests/tests_response_handlers/tests_hitbtc/test_trading.py
|
astsu-dev/exapi
|
1ef39ccdd77e9ddb60ec6eaa16a2cc26e1ac3e12
|
[
"MIT"
] | null | null | null |
tests/tests_response_handlers/tests_hitbtc/test_trading.py
|
astsu-dev/exapi
|
1ef39ccdd77e9ddb60ec6eaa16a2cc26e1ac3e12
|
[
"MIT"
] | null | null | null |
from decimal import Decimal
from typing import Final
from unittest import mock
import pytest
from exapi.response_handlers.hitbtc.trading import HitbtcTradingResponseHandler
from exapi.models.hitbtc import (HitbtcOrderModel,
HitbtcTradingCurrencyBalanceModel,
HitbtcTradingFeeModel)
# Dotted path of the handler's ``handle_response`` method; every test below
# patches it so the method under test receives a canned JSON-like payload.
HANDLE_RESPONSE_PATH: Final[str] = "exapi.response_handlers.hitbtc.trading.handler.HitbtcTradingResponseHandler.handle_response"
# Placeholder HTTP response object; its contents are irrelevant because
# ``handle_response`` is mocked in every test.
response_mock = mock.Mock()
@pytest.fixture(scope="module")
def handler() -> HitbtcTradingResponseHandler:
return HitbtcTradingResponseHandler()
@pytest.mark.asyncio
async def test_handle_get_trading_balance_response(handler: HitbtcTradingResponseHandler) -> None:
    """Raw balance payloads are converted to currency balance models."""
    raw_balances = [
        {"currency": "BTCUSDT", "available": "0.1", "reserved": "0.05"},
        {"currency": "ETHUSDT", "available": "0.3", "reserved": "0.07"},
    ]
    expected = [
        HitbtcTradingCurrencyBalanceModel(
            currency="BTCUSDT",
            available=Decimal("0.1"),
            reserved=Decimal("0.05")),
        HitbtcTradingCurrencyBalanceModel(
            currency="ETHUSDT",
            available=Decimal("0.3"),
            reserved=Decimal("0.07")),
    ]
    with mock.patch(HANDLE_RESPONSE_PATH) as handle_response:
        handle_response.return_value = raw_balances
        result = await handler.handle_get_trading_balance_response(response_mock)
    assert result == expected
@pytest.mark.asyncio
async def test_handle_get_active_order_response(handler: HitbtcTradingResponseHandler) -> None:
    """A raw active-order payload is converted to a HitbtcOrderModel."""
    raw_order = {
        "id": 5,
        "clientOrderId": "234j3k242",
        "symbol": "BTCUSDT",
        "side": "sell",
        "status": "new",
        "type": "limit",
        "timeInForce": "GTC",
        "quantity": "0.001",
        "price": "0.002",
        "cumQuantity": "1.3",
        "createdAt": "1234",
        "updatedAt": "1234",
        "postOnly": True,
        "avgPrice": "0.56",
        "stopPrice": "0.12",
        "expireTime": "1234",
    }
    expected = HitbtcOrderModel(
        id=5,
        client_order_id="234j3k242",
        symbol="BTCUSDT",
        side="sell",
        status="new",
        type="limit",
        time_in_force="GTC",
        quantity=Decimal("0.001"),
        price=Decimal("0.002"),
        cum_quantity=Decimal("1.3"),
        created_at="1234",
        updated_at="1234",
        post_only=True,
        avg_price=Decimal("0.56"),
        stop_price=Decimal("0.12"),
        expire_time="1234")
    with mock.patch(HANDLE_RESPONSE_PATH) as handle_response:
        handle_response.return_value = raw_order
        result = await handler.handle_get_active_order_response(response_mock)
    assert result == expected
@pytest.mark.asyncio
async def test_handle_get_active_orders_response(handler: HitbtcTradingResponseHandler) -> None:
    """A list of raw order payloads maps to a list of HitbtcOrderModel.

    Fix: the original pasted the identical 16-line payload and model
    literal twice; define each once and duplicate programmatically.
    """
    raw_order = {
        "id": 5,
        "clientOrderId": "234j3k242",
        "symbol": "BTCUSDT",
        "side": "sell",
        "status": "new",
        "type": "limit",
        "timeInForce": "GTC",
        "quantity": "0.001",
        "price": "0.002",
        "cumQuantity": "1.3",
        "createdAt": "1234",
        "updatedAt": "1234",
        "postOnly": True,
        "avgPrice": "0.56",
        "stopPrice": "0.12",
        "expireTime": "1234",
    }
    expected_order = HitbtcOrderModel(
        id=5,
        client_order_id="234j3k242",
        symbol="BTCUSDT",
        side="sell",
        status="new",
        type="limit",
        time_in_force="GTC",
        quantity=Decimal("0.001"),
        price=Decimal("0.002"),
        cum_quantity=Decimal("1.3"),
        created_at="1234",
        updated_at="1234",
        post_only=True,
        avg_price=Decimal("0.56"),
        stop_price=Decimal("0.12"),
        expire_time="1234")
    with mock.patch(HANDLE_RESPONSE_PATH) as handle_response:
        # Independent shallow copies (payload values are scalars) so the
        # handler cannot observe aliased dicts.
        handle_response.return_value = [dict(raw_order), dict(raw_order)]
        result = await handler.handle_get_active_orders_response(response_mock)
    # Equality-only comparison, so repeating the same model object is fine.
    assert result == [expected_order, expected_order]
@pytest.mark.asyncio
async def test_handle_new_order_response(handler: HitbtcTradingResponseHandler) -> None:
    """A raw new-order payload is converted to a HitbtcOrderModel."""
    raw_order = {
        "id": 5,
        "clientOrderId": "234j3k242",
        "symbol": "BTCUSDT",
        "side": "sell",
        "status": "new",
        "type": "limit",
        "timeInForce": "GTC",
        "quantity": "0.001",
        "price": "0.002",
        "cumQuantity": "1.3",
        "createdAt": "1234",
        "updatedAt": "1234",
        "postOnly": True,
        "avgPrice": "0.56",
        "stopPrice": "0.12",
        "expireTime": "1234",
    }
    expected = HitbtcOrderModel(
        id=5,
        client_order_id="234j3k242",
        symbol="BTCUSDT",
        side="sell",
        status="new",
        type="limit",
        time_in_force="GTC",
        quantity=Decimal("0.001"),
        price=Decimal("0.002"),
        cum_quantity=Decimal("1.3"),
        created_at="1234",
        updated_at="1234",
        post_only=True,
        avg_price=Decimal("0.56"),
        stop_price=Decimal("0.12"),
        expire_time="1234")
    with mock.patch(HANDLE_RESPONSE_PATH) as handle_response:
        handle_response.return_value = raw_order
        result = await handler.handle_new_order_response(response_mock)
    assert result == expected
@pytest.mark.asyncio
async def test_handle_cancel_orders_response(handler: HitbtcTradingResponseHandler) -> None:
    """A list of raw cancelled-order payloads maps to HitbtcOrderModel objects.

    Fix: the original pasted the identical 16-line payload and model
    literal twice; define each once and duplicate programmatically.
    """
    raw_order = {
        "id": 5,
        "clientOrderId": "234j3k242",
        "symbol": "BTCUSDT",
        "side": "sell",
        "status": "new",
        "type": "limit",
        "timeInForce": "GTC",
        "quantity": "0.001",
        "price": "0.002",
        "cumQuantity": "1.3",
        "createdAt": "1234",
        "updatedAt": "1234",
        "postOnly": True,
        "avgPrice": "0.56",
        "stopPrice": "0.12",
        "expireTime": "1234",
    }
    expected_order = HitbtcOrderModel(
        id=5,
        client_order_id="234j3k242",
        symbol="BTCUSDT",
        side="sell",
        status="new",
        type="limit",
        time_in_force="GTC",
        quantity=Decimal("0.001"),
        price=Decimal("0.002"),
        cum_quantity=Decimal("1.3"),
        created_at="1234",
        updated_at="1234",
        post_only=True,
        avg_price=Decimal("0.56"),
        stop_price=Decimal("0.12"),
        expire_time="1234")
    with mock.patch(HANDLE_RESPONSE_PATH) as handle_response:
        # Independent shallow copies (payload values are scalars) so the
        # handler cannot observe aliased dicts.
        handle_response.return_value = [dict(raw_order), dict(raw_order)]
        result = await handler.handle_cancel_orders_response(response_mock)
    # Equality-only comparison, so repeating the same model object is fine.
    assert result == [expected_order, expected_order]
@pytest.mark.asyncio
async def test_handle_cancel_order_response(handler: HitbtcTradingResponseHandler) -> None:
    """A raw cancelled-order payload is converted to a HitbtcOrderModel."""
    raw_order = {
        "id": 5,
        "clientOrderId": "234j3k242",
        "symbol": "BTCUSDT",
        "side": "sell",
        "status": "new",
        "type": "limit",
        "timeInForce": "GTC",
        "quantity": "0.001",
        "price": "0.002",
        "cumQuantity": "1.3",
        "createdAt": "1234",
        "updatedAt": "1234",
        "postOnly": True,
        "avgPrice": "0.56",
        "stopPrice": "0.12",
        "expireTime": "1234",
    }
    expected = HitbtcOrderModel(
        id=5,
        client_order_id="234j3k242",
        symbol="BTCUSDT",
        side="sell",
        status="new",
        type="limit",
        time_in_force="GTC",
        quantity=Decimal("0.001"),
        price=Decimal("0.002"),
        cum_quantity=Decimal("1.3"),
        created_at="1234",
        updated_at="1234",
        post_only=True,
        avg_price=Decimal("0.56"),
        stop_price=Decimal("0.12"),
        expire_time="1234")
    with mock.patch(HANDLE_RESPONSE_PATH) as handle_response:
        handle_response.return_value = raw_order
        result = await handler.handle_cancel_order_response(response_mock)
    assert result == expected
@pytest.mark.asyncio
async def test_handle_get_fee_response(handler: HitbtcTradingResponseHandler) -> None:
    """Raw fee payload maps to a fee model with Decimal rates."""
    raw_fee = {
        "takeLiquidityRate": "0.0001",
        "provideLiquidityRate": "0.0002",
    }
    expected = HitbtcTradingFeeModel(
        take_liquidity_rate=Decimal("0.0001"),
        provide_liquidity_rate=Decimal("0.0002"))
    with mock.patch(HANDLE_RESPONSE_PATH) as handle_response:
        handle_response.return_value = raw_fee
        result = await handler.handle_get_fee_response(response_mock)
    assert result == expected
| 34.305233
| 128
| 0.480214
| 1,016
| 11,801
| 5.396654
| 0.103346
| 0.049608
| 0.04979
| 0.066387
| 0.848258
| 0.820901
| 0.820901
| 0.820901
| 0.81397
| 0.81397
| 0
| 0.075566
| 0.389967
| 11,801
| 343
| 129
| 34.405248
| 0.686068
| 0
| 0
| 0.795666
| 0
| 0
| 0.171257
| 0.007711
| 0
| 0
| 0
| 0
| 0.021672
| 1
| 0.003096
| false
| 0
| 0.018576
| 0.003096
| 0.024768
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4792db33283a612d01b03f46f26f01ad757a4666
| 580
|
py
|
Python
|
train_medseg_timm-regnetx_002_image_compression.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
train_medseg_timm-regnetx_002_image_compression.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
train_medseg_timm-regnetx_002_image_compression.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os

# Launch one training run per cross-validation fold of the MedSeg UNet++
# (timm-regnetx_002, image-compression augmentation) experiment.
# Fix: the five command strings differed only in the fold number, so build
# them from a single template instead of copy-pasting.
CONFIG_TEMPLATE = (
    "python main.py --configs "
    "configs/train_medseg_unetplusplus_timm-regnetx_002_fold{fold}_image_compression.yml"
)

for fold in range(5):
    # os.system preserves the original behavior: shell invocation,
    # exit status ignored.
    os.system(CONFIG_TEMPLATE.format(fold=fold))
| 52.727273
| 110
| 0.856897
| 85
| 580
| 5.435294
| 0.294118
| 0.108225
| 0.12987
| 0.205628
| 0.84632
| 0.84632
| 0.84632
| 0.84632
| 0.84632
| 0.84632
| 0
| 0.03643
| 0.053448
| 580
| 11
| 111
| 52.727273
| 0.8051
| 0
| 0
| 0
| 0
| 0
| 0.886403
| 0.671256
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
47ac3ee549003215ec35fe754f01906e13c75e52
| 39,640
|
py
|
Python
|
dabasco/af/tests/test_af_import_wyner.py
|
hhucn/dabasco
|
847cf550b4f55baf21446564c908e992a69366f0
|
[
"MIT"
] | 5
|
2017-08-02T16:55:10.000Z
|
2019-05-15T21:05:10.000Z
|
dabasco/af/tests/test_af_import_wyner.py
|
hhucn/dabasco
|
847cf550b4f55baf21446564c908e992a69366f0
|
[
"MIT"
] | null | null | null |
dabasco/af/tests/test_af_import_wyner.py
|
hhucn/dabasco
|
847cf550b4f55baf21446564c908e992a69366f0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import unittest
from dabasco.config import *
from dabasco.af.af_graph import AF
from dabasco.af.import_wyner import import_af_wyner
from dabasco.dbas.dbas_import import import_dbas_graph, import_dbas_user
from os import path
import logging.config
# Configure logging from the repository-level logging.ini, resolved relative
# to this test module's location; keep pre-existing loggers enabled.
log_file_path = path.join(path.dirname(path.abspath(__file__)), '../../logging.ini')
logging.config.fileConfig(log_file_path, disable_existing_loggers=False)
logger = logging.getLogger('test')
class TestAFImport(unittest.TestCase):
def test_discussion1_no_user(self):
    """Wyner-style AF import of discussion 1 without any user opinion."""
    dbas_discussion_json = {
        "inferences": [
            {"conclusion": 1, "id": 1, "is_supportive": True, "premises": [2]},
            {"conclusion": 1, "id": 2, "is_supportive": False, "premises": [3]}
        ],
        "nodes": [1, 2, 3],
        "undercuts": []
    }
    dbas_discussion = import_dbas_graph(discussion_id=1, graph_export=dbas_discussion_json)
    af_result = import_af_wyner(dbas_discussion, opinion=None, opinion_strict=False)
    # Resolve each literal name to its argument index in the resulting AF.
    name_to_arg = af_result.get_argument_for_name
    arg_1 = name_to_arg(LITERAL_PREFIX_STATEMENT + '1')
    arg_neg1 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '1')
    arg_2 = name_to_arg(LITERAL_PREFIX_STATEMENT + '2')
    arg_neg2 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '2')
    arg_3 = name_to_arg(LITERAL_PREFIX_STATEMENT + '3')
    arg_neg3 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '3')
    arg_r1 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '1')
    arg_r2 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '2')
    af_reference = AF(8)
    # Mutual attacks: statement vs. its negation, rebuttals between rules
    # and (negated) conclusions, and rules with conflicting conclusions.
    for left, right in [(arg_1, arg_neg1), (arg_2, arg_neg2), (arg_3, arg_neg3),
                        (arg_r1, arg_neg1), (arg_r2, arg_1),
                        (arg_r1, arg_r2)]:
        af_reference.set_attack(left, right, AF.DEFINITE_ATTACK)
        af_reference.set_attack(right, left, AF.DEFINITE_ATTACK)
    # One-directional undermining attacks by negated premises.
    af_reference.set_attack(arg_neg2, arg_r1, AF.DEFINITE_ATTACK)
    af_reference.set_attack(arg_neg3, arg_r2, AF.DEFINITE_ATTACK)
    self.assertTrue(af_reference.is_equivalent_to(af_result))
def test_discussion2_no_user(self):
    """Wyner-style AF import of discussion 2 (with undercut), no user opinion."""
    dbas_discussion_json = {
        "inferences": [
            {"conclusion": 1, "id": 1, "is_supportive": True, "premises": [2]},
            {"conclusion": 1, "id": 2, "is_supportive": False, "premises": [3]},
            {"conclusion": 2, "id": 3, "is_supportive": False, "premises": [4]}
        ],
        "nodes": [1, 2, 3, 4, 5],
        "undercuts": [
            {"conclusion": 2, "id": 4, "premises": [5]}
        ]
    }
    dbas_discussion = import_dbas_graph(discussion_id=2, graph_export=dbas_discussion_json)
    af_result = import_af_wyner(dbas_discussion, opinion=None, opinion_strict=False)
    # Resolve each literal name to its argument index in the resulting AF.
    name_to_arg = af_result.get_argument_for_name
    arg_1 = name_to_arg(LITERAL_PREFIX_STATEMENT + '1')
    arg_neg1 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '1')
    arg_2 = name_to_arg(LITERAL_PREFIX_STATEMENT + '2')
    arg_neg2 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '2')
    arg_3 = name_to_arg(LITERAL_PREFIX_STATEMENT + '3')
    arg_neg3 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '3')
    arg_4 = name_to_arg(LITERAL_PREFIX_STATEMENT + '4')
    arg_neg4 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '4')
    arg_5 = name_to_arg(LITERAL_PREFIX_STATEMENT + '5')
    arg_neg5 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '5')
    arg_r1 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '1')
    arg_r2 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '2')
    arg_r3 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '3')
    arg_r4 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '4')
    af_reference = AF(14)
    # Mutual attacks: statement vs. its negation, rebuttals between rules
    # and (negated) conclusions, the undercut, and conflicting rules.
    for left, right in [(arg_1, arg_neg1), (arg_2, arg_neg2), (arg_3, arg_neg3),
                        (arg_4, arg_neg4), (arg_5, arg_neg5),
                        (arg_r1, arg_neg1), (arg_r2, arg_1), (arg_r3, arg_2),
                        (arg_r4, arg_r2),
                        (arg_r1, arg_r2)]:
        af_reference.set_attack(left, right, AF.DEFINITE_ATTACK)
        af_reference.set_attack(right, left, AF.DEFINITE_ATTACK)
    # One-directional undermining attacks by negated premises.
    af_reference.set_attack(arg_neg2, arg_r1, AF.DEFINITE_ATTACK)
    af_reference.set_attack(arg_neg3, arg_r2, AF.DEFINITE_ATTACK)
    af_reference.set_attack(arg_neg4, arg_r3, AF.DEFINITE_ATTACK)
    af_reference.set_attack(arg_neg5, arg_r4, AF.DEFINITE_ATTACK)
    self.assertTrue(af_reference.is_equivalent_to(af_result))
def test_discussion1_user1_weak_opinion(self):
    """AF import of discussion 1 with user 1's non-strict (weak) opinion."""
    dbas_discussion_json = {
        "inferences": [
            {"conclusion": 1, "id": 1, "is_supportive": True, "premises": [2]},
            {"conclusion": 1, "id": 2, "is_supportive": False, "premises": [3]}
        ],
        "nodes": [1, 2, 3],
        "undercuts": []
    }
    dbas_user_json = {
        "accepted_statements_via_click": [2],
        "marked_arguments": [],
        "marked_statements": [],
        "rejected_arguments": [],
        "rejected_statements_via_click": [3],
    }
    dbas_discussion = import_dbas_graph(discussion_id=1, graph_export=dbas_discussion_json)
    dbas_user = import_dbas_user(discussion_id=1, user_id=1, user_export=dbas_user_json)
    af_result = import_af_wyner(dbas_discussion, opinion=dbas_user, opinion_strict=False)
    # Resolve each literal name to its argument index in the resulting AF.
    name_to_arg = af_result.get_argument_for_name
    arg_1 = name_to_arg(LITERAL_PREFIX_STATEMENT + '1')
    arg_neg1 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '1')
    arg_2 = name_to_arg(LITERAL_PREFIX_STATEMENT + '2')
    arg_neg2 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '2')
    arg_3 = name_to_arg(LITERAL_PREFIX_STATEMENT + '3')
    arg_neg3 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '3')
    arg_r1 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '1')
    arg_r2 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '2')
    arg_u2 = name_to_arg(DUMMY_LITERAL_NAME_OPINION + '_' + '2')
    arg_uneg3 = name_to_arg(DUMMY_LITERAL_NAME_OPINION + '_' + LITERAL_PREFIX_NOT + '3')
    af_reference = AF(10)
    # Mutual attacks: statement vs. negation, rebuttals, weak-opinion
    # arguments vs. the opposing statements, and conflicting rules.
    for left, right in [(arg_1, arg_neg1), (arg_2, arg_neg2), (arg_3, arg_neg3),
                        (arg_r1, arg_neg1), (arg_r2, arg_1),
                        (arg_u2, arg_neg2), (arg_uneg3, arg_3),
                        (arg_r1, arg_r2)]:
        af_reference.set_attack(left, right, AF.DEFINITE_ATTACK)
        af_reference.set_attack(right, left, AF.DEFINITE_ATTACK)
    # One-directional undermining attacks by negated premises.
    af_reference.set_attack(arg_neg2, arg_r1, AF.DEFINITE_ATTACK)
    af_reference.set_attack(arg_neg3, arg_r2, AF.DEFINITE_ATTACK)
    self.assertTrue(af_reference.is_equivalent_to(af_result))
def test_discussion2_user2_weak_opinion(self):
    """AF import of discussion 2 with user 2's non-strict (weak) opinion."""
    dbas_discussion_json = {
        "inferences": [
            {"conclusion": 1, "id": 1, "is_supportive": True, "premises": [2]},
            {"conclusion": 1, "id": 2, "is_supportive": False, "premises": [3]},
            {"conclusion": 2, "id": 3, "is_supportive": False, "premises": [4]}
        ],
        "nodes": [1, 2, 3, 4, 5],
        "undercuts": [
            {"conclusion": 2, "id": 4, "premises": [5]}
        ]
    }
    dbas_user_json = {
        "accepted_statements_via_click": [3],
        "marked_arguments": [],
        "marked_statements": [4],
        "rejected_arguments": [],
        "rejected_statements_via_click": [5],
    }
    dbas_discussion = import_dbas_graph(discussion_id=2, graph_export=dbas_discussion_json)
    dbas_user = import_dbas_user(discussion_id=2, user_id=2, user_export=dbas_user_json)
    af_result = import_af_wyner(dbas_discussion, opinion=dbas_user, opinion_strict=False)
    # Resolve each literal name to its argument index in the resulting AF.
    name_to_arg = af_result.get_argument_for_name
    arg_1 = name_to_arg(LITERAL_PREFIX_STATEMENT + '1')
    arg_neg1 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '1')
    arg_2 = name_to_arg(LITERAL_PREFIX_STATEMENT + '2')
    arg_neg2 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '2')
    arg_3 = name_to_arg(LITERAL_PREFIX_STATEMENT + '3')
    arg_neg3 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '3')
    arg_4 = name_to_arg(LITERAL_PREFIX_STATEMENT + '4')
    arg_neg4 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '4')
    arg_5 = name_to_arg(LITERAL_PREFIX_STATEMENT + '5')
    arg_neg5 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '5')
    arg_r1 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '1')
    arg_r2 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '2')
    arg_r3 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '3')
    arg_r4 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '4')
    arg_u3 = name_to_arg(DUMMY_LITERAL_NAME_OPINION + '_' + '3')
    arg_u4 = name_to_arg(DUMMY_LITERAL_NAME_OPINION + '_' + '4')
    arg_uneg5 = name_to_arg(DUMMY_LITERAL_NAME_OPINION + '_' + LITERAL_PREFIX_NOT + '5')
    af_reference = AF(17)
    # Mutual attacks: statement vs. negation, rebuttals, the undercut,
    # weak-opinion arguments vs. opposing statements, and conflicting rules.
    for left, right in [(arg_1, arg_neg1), (arg_2, arg_neg2), (arg_3, arg_neg3),
                        (arg_4, arg_neg4), (arg_5, arg_neg5),
                        (arg_r1, arg_neg1), (arg_r2, arg_1), (arg_r3, arg_2),
                        (arg_r4, arg_r2),
                        (arg_u3, arg_neg3), (arg_u4, arg_neg4), (arg_uneg5, arg_5),
                        (arg_r1, arg_r2)]:
        af_reference.set_attack(left, right, AF.DEFINITE_ATTACK)
        af_reference.set_attack(right, left, AF.DEFINITE_ATTACK)
    # One-directional undermining attacks by negated premises.
    af_reference.set_attack(arg_neg2, arg_r1, AF.DEFINITE_ATTACK)
    af_reference.set_attack(arg_neg3, arg_r2, AF.DEFINITE_ATTACK)
    af_reference.set_attack(arg_neg4, arg_r3, AF.DEFINITE_ATTACK)
    af_reference.set_attack(arg_neg5, arg_r4, AF.DEFINITE_ATTACK)
    self.assertTrue(af_reference.is_equivalent_to(af_result))
def test_discussion1_user1_strict_opinion(self):
    """AF import of discussion 1 with user 1's strict opinion (single opinion arg)."""
    dbas_discussion_json = {
        "inferences": [
            {"conclusion": 1, "id": 1, "is_supportive": True, "premises": [2]},
            {"conclusion": 1, "id": 2, "is_supportive": False, "premises": [3]}
        ],
        "nodes": [1, 2, 3],
        "undercuts": []
    }
    dbas_user_json = {
        "accepted_statements_via_click": [2],
        "marked_arguments": [],
        "marked_statements": [],
        "rejected_arguments": [],
        "rejected_statements_via_click": [3],
    }
    dbas_discussion = import_dbas_graph(discussion_id=1, graph_export=dbas_discussion_json)
    dbas_user = import_dbas_user(discussion_id=1, user_id=1, user_export=dbas_user_json)
    af_result = import_af_wyner(dbas_discussion, opinion=dbas_user, opinion_strict=True)
    # Resolve each literal name to its argument index in the resulting AF.
    name_to_arg = af_result.get_argument_for_name
    arg_1 = name_to_arg(LITERAL_PREFIX_STATEMENT + '1')
    arg_neg1 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '1')
    arg_2 = name_to_arg(LITERAL_PREFIX_STATEMENT + '2')
    arg_neg2 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '2')
    arg_3 = name_to_arg(LITERAL_PREFIX_STATEMENT + '3')
    arg_neg3 = name_to_arg(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + '3')
    arg_r1 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '1')
    arg_r2 = name_to_arg(LITERAL_PREFIX_INFERENCE_RULE + '2')
    arg_u = name_to_arg(DUMMY_LITERAL_NAME_OPINION)
    af_reference = AF(9)
    # Mutual attacks: statement vs. negation, rebuttals, conflicting rules.
    for left, right in [(arg_1, arg_neg1), (arg_2, arg_neg2), (arg_3, arg_neg3),
                        (arg_r1, arg_neg1), (arg_r2, arg_1),
                        (arg_r1, arg_r2)]:
        af_reference.set_attack(left, right, AF.DEFINITE_ATTACK)
        af_reference.set_attack(right, left, AF.DEFINITE_ATTACK)
    # One-directional undermining attacks by negated premises.
    af_reference.set_attack(arg_neg2, arg_r1, AF.DEFINITE_ATTACK)
    af_reference.set_attack(arg_neg3, arg_r2, AF.DEFINITE_ATTACK)
    # The strict opinion argument attacks the opposing statements one-way only.
    af_reference.set_attack(arg_u, arg_neg2, AF.DEFINITE_ATTACK)
    af_reference.set_attack(arg_u, arg_3, AF.DEFINITE_ATTACK)
    self.assertTrue(af_reference.is_equivalent_to(af_result))
def test_discussion2_user2_strict_opinion(self):
    """Discussion 2 imported with user 2's opinion applied strictly.

    A strict opinion is represented by a single dummy opinion argument
    that attacks every statement argument conflicting with the user's
    accepted/marked/rejected statements.
    """
    discussion_id = 2
    user_id = 2
    assumptions_strict = True
    dbas_discussion_json = {
        "inferences": [
            {"conclusion": 1, "id": 1, "is_supportive": True, "premises": [2]},
            {"conclusion": 1, "id": 2, "is_supportive": False, "premises": [3]},
            {"conclusion": 2, "id": 3, "is_supportive": False, "premises": [4]}
        ],
        "nodes": [1, 2, 3, 4, 5],
        "undercuts": [
            {"conclusion": 2, "id": 4, "premises": [5]}
        ]
    }
    dbas_user_json = {
        "accepted_statements_via_click": [3],
        "marked_arguments": [],
        "marked_statements": [4],
        "rejected_arguments": [],
        "rejected_statements_via_click": [5],
    }
    dbas_discussion = import_dbas_graph(discussion_id=discussion_id, graph_export=dbas_discussion_json)
    dbas_user = import_dbas_user(discussion_id=discussion_id, user_id=user_id, user_export=dbas_user_json)
    af_result = import_af_wyner(dbas_discussion, opinion=dbas_user, opinion_strict=assumptions_strict)
    af_reference = AF(15)

    lookup = af_result.get_argument_for_name
    statement_pairs = [
        (lookup(LITERAL_PREFIX_STATEMENT + str(i)),
         lookup(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + str(i)))
        for i in range(1, 6)
    ]
    (arg_1, arg_neg1), (arg_2, arg_neg2), (arg_3, arg_neg3), \
        (arg_4, arg_neg4), (arg_5, arg_neg5) = statement_pairs
    arg_r1, arg_r2, arg_r3, arg_r4 = (
        lookup(LITERAL_PREFIX_INFERENCE_RULE + str(i)) for i in range(1, 5))
    arg_u = lookup(DUMMY_LITERAL_NAME_OPINION)

    def mutual(first, second):
        # Symmetric definite attack between two arguments.
        af_reference.set_attack(first, second, AF.DEFINITE_ATTACK)
        af_reference.set_attack(second, first, AF.DEFINITE_ATTACK)

    # Mutual attacks between each statement and its negation.
    for positive, negated in statement_pairs:
        mutual(positive, negated)
    # Undermining: a negated premise attacks the rule relying on it.
    for negated_premise, rule in ((arg_neg2, arg_r1), (arg_neg3, arg_r2),
                                  (arg_neg4, arg_r3), (arg_neg5, arg_r4)):
        af_reference.set_attack(negated_premise, rule, AF.DEFINITE_ATTACK)
    # Rebutting: each rule vs. the argument opposing its conclusion.
    mutual(arg_r1, arg_neg1)
    mutual(arg_r2, arg_1)
    mutual(arg_r3, arg_2)
    # Undercutting: rule 4 and the undercut rule 2 attack each other.
    mutual(arg_r4, arg_r2)
    # Strict opinion: one dummy argument attacks all conflicting statements.
    for target in (arg_neg3, arg_neg4, arg_5):
        af_reference.set_attack(arg_u, target, AF.DEFINITE_ATTACK)
    # Rules with conflicting conclusions rebut each other.
    mutual(arg_r1, arg_r2)
    self.assertTrue(af_reference.is_equivalent_to(af_result))
def test_discussion2_user3_weak_opinion(self):
    """Discussion 2 imported with user 3's opinion applied weakly.

    A weak opinion yields one dummy opinion argument per opinion item,
    each mutually attacking the statement argument it opposes.
    """
    discussion_id = 2
    user_id = 3
    assumptions_strict = False
    dbas_discussion_json = {
        "inferences": [
            {"conclusion": 1, "id": 1, "is_supportive": True, "premises": [2]},
            {"conclusion": 1, "id": 2, "is_supportive": False, "premises": [3]},
            {"conclusion": 2, "id": 3, "is_supportive": False, "premises": [4]}
        ],
        "nodes": [1, 2, 3, 4, 5],
        "undercuts": [
            {"conclusion": 2, "id": 4, "premises": [5]}
        ]
    }
    dbas_user_json = {
        "accepted_statements_via_click": [2],
        "marked_arguments": [],
        "marked_statements": [],
        "rejected_arguments": [],
        "rejected_statements_via_click": [1],
    }
    dbas_discussion = import_dbas_graph(discussion_id=discussion_id, graph_export=dbas_discussion_json)
    dbas_user = import_dbas_user(discussion_id=discussion_id, user_id=user_id, user_export=dbas_user_json)
    af_result = import_af_wyner(dbas_discussion, opinion=dbas_user, opinion_strict=assumptions_strict)
    af_reference = AF(16)

    lookup = af_result.get_argument_for_name
    statement_pairs = [
        (lookup(LITERAL_PREFIX_STATEMENT + str(i)),
         lookup(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + str(i)))
        for i in range(1, 6)
    ]
    (arg_1, arg_neg1), (arg_2, arg_neg2), (arg_3, arg_neg3), \
        (arg_4, arg_neg4), (arg_5, arg_neg5) = statement_pairs
    arg_r1, arg_r2, arg_r3, arg_r4 = (
        lookup(LITERAL_PREFIX_INFERENCE_RULE + str(i)) for i in range(1, 5))
    arg_uneg1 = lookup(DUMMY_LITERAL_NAME_OPINION + '_' + LITERAL_PREFIX_NOT + '1')
    arg_u2 = lookup(DUMMY_LITERAL_NAME_OPINION + '_' + '2')

    def mutual(first, second):
        # Symmetric definite attack between two arguments.
        af_reference.set_attack(first, second, AF.DEFINITE_ATTACK)
        af_reference.set_attack(second, first, AF.DEFINITE_ATTACK)

    # Mutual attacks between each statement and its negation.
    for positive, negated in statement_pairs:
        mutual(positive, negated)
    # Undermining: a negated premise attacks the rule relying on it.
    for negated_premise, rule in ((arg_neg2, arg_r1), (arg_neg3, arg_r2),
                                  (arg_neg4, arg_r3), (arg_neg5, arg_r4)):
        af_reference.set_attack(negated_premise, rule, AF.DEFINITE_ATTACK)
    # Rebutting: each rule vs. the argument opposing its conclusion.
    mutual(arg_r1, arg_neg1)
    mutual(arg_r2, arg_1)
    mutual(arg_r3, arg_2)
    # Undercutting: rule 4 and the undercut rule 2 attack each other.
    mutual(arg_r4, arg_r2)
    # Weak opinion: each opinion argument vs. the statement it opposes.
    mutual(arg_u2, arg_neg2)
    mutual(arg_uneg1, arg_1)
    # Rebutting between rule/opinion arguments with conflicting conclusions.
    mutual(arg_r1, arg_r2)
    mutual(arg_r1, arg_uneg1)
    mutual(arg_r3, arg_u2)
    self.assertTrue(af_reference.is_equivalent_to(af_result))
def test_discussion2_user3_strict_opinion(self):
    """Discussion 2 imported with user 3's opinion applied strictly.

    A strict opinion is a single dummy argument attacking every
    statement and rule argument conflicting with the user's clicks.
    """
    discussion_id = 2
    user_id = 3
    assumptions_strict = True
    dbas_discussion_json = {
        "inferences": [
            {"conclusion": 1, "id": 1, "is_supportive": True, "premises": [2]},
            {"conclusion": 1, "id": 2, "is_supportive": False, "premises": [3]},
            {"conclusion": 2, "id": 3, "is_supportive": False, "premises": [4]}
        ],
        "nodes": [1, 2, 3, 4, 5],
        "undercuts": [
            {"conclusion": 2, "id": 4, "premises": [5]}
        ]
    }
    dbas_user_json = {
        "accepted_statements_via_click": [2],
        "marked_arguments": [],
        "marked_statements": [],
        "rejected_arguments": [],
        "rejected_statements_via_click": [1],
    }
    dbas_discussion = import_dbas_graph(discussion_id=discussion_id, graph_export=dbas_discussion_json)
    dbas_user = import_dbas_user(discussion_id=discussion_id, user_id=user_id, user_export=dbas_user_json)
    af_result = import_af_wyner(dbas_discussion, opinion=dbas_user, opinion_strict=assumptions_strict)
    af_reference = AF(15)

    lookup = af_result.get_argument_for_name
    statement_pairs = [
        (lookup(LITERAL_PREFIX_STATEMENT + str(i)),
         lookup(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + str(i)))
        for i in range(1, 6)
    ]
    (arg_1, arg_neg1), (arg_2, arg_neg2), (arg_3, arg_neg3), \
        (arg_4, arg_neg4), (arg_5, arg_neg5) = statement_pairs
    arg_r1, arg_r2, arg_r3, arg_r4 = (
        lookup(LITERAL_PREFIX_INFERENCE_RULE + str(i)) for i in range(1, 5))
    arg_u = lookup(DUMMY_LITERAL_NAME_OPINION)

    def mutual(first, second):
        # Symmetric definite attack between two arguments.
        af_reference.set_attack(first, second, AF.DEFINITE_ATTACK)
        af_reference.set_attack(second, first, AF.DEFINITE_ATTACK)

    # Mutual attacks between each statement and its negation.
    for positive, negated in statement_pairs:
        mutual(positive, negated)
    # Undermining: a negated premise attacks the rule relying on it.
    for negated_premise, rule in ((arg_neg2, arg_r1), (arg_neg3, arg_r2),
                                  (arg_neg4, arg_r3), (arg_neg5, arg_r4)):
        af_reference.set_attack(negated_premise, rule, AF.DEFINITE_ATTACK)
    # Rebutting: each rule vs. the argument opposing its conclusion.
    mutual(arg_r1, arg_neg1)
    mutual(arg_r2, arg_1)
    mutual(arg_r3, arg_2)
    # Undercutting: rule 4 and the undercut rule 2 attack each other.
    mutual(arg_r4, arg_r2)
    # Strict opinion: dummy argument attacks conflicting statements.
    af_reference.set_attack(arg_u, arg_neg2, AF.DEFINITE_ATTACK)
    af_reference.set_attack(arg_u, arg_1, AF.DEFINITE_ATTACK)
    # Rebutting between rules with conflicting conclusions, and the
    # strict opinion attacking rules concluding against it.
    mutual(arg_r1, arg_r2)
    af_reference.set_attack(arg_u, arg_r1, AF.DEFINITE_ATTACK)
    af_reference.set_attack(arg_u, arg_r3, AF.DEFINITE_ATTACK)
    self.assertTrue(af_reference.is_equivalent_to(af_result))
def test_discussion2_user99_weak_opinion(self):
    """Discussion 2 with a weak opinion containing unknown statements.

    The opinion references statement ids 99 and 999 that do not exist
    in the discussion; only the known ids (3, 4, 5) produce opinion
    arguments in the resulting framework.
    """
    discussion_id = 2
    user_id = 99
    assumptions_strict = False
    dbas_discussion_json = {
        "inferences": [
            {"conclusion": 1, "id": 1, "is_supportive": True, "premises": [2]},
            {"conclusion": 1, "id": 2, "is_supportive": False, "premises": [3]},
            {"conclusion": 2, "id": 3, "is_supportive": False, "premises": [4]}
        ],
        "nodes": [1, 2, 3, 4, 5],
        "undercuts": [
            {"conclusion": 2, "id": 4, "premises": [5]}
        ]
    }
    dbas_user_json = {
        "accepted_statements_via_click": [3, 4, 99],
        "marked_arguments": [],
        "marked_statements": [],
        "rejected_arguments": [],
        "rejected_statements_via_click": [5, 999],
    }
    dbas_discussion = import_dbas_graph(discussion_id=discussion_id, graph_export=dbas_discussion_json)
    dbas_user = import_dbas_user(discussion_id=discussion_id, user_id=user_id, user_export=dbas_user_json)
    af_result = import_af_wyner(dbas_discussion, opinion=dbas_user, opinion_strict=assumptions_strict)
    af_reference = AF(17)

    lookup = af_result.get_argument_for_name
    statement_pairs = [
        (lookup(LITERAL_PREFIX_STATEMENT + str(i)),
         lookup(LITERAL_PREFIX_NOT + LITERAL_PREFIX_STATEMENT + str(i)))
        for i in range(1, 6)
    ]
    (arg_1, arg_neg1), (arg_2, arg_neg2), (arg_3, arg_neg3), \
        (arg_4, arg_neg4), (arg_5, arg_neg5) = statement_pairs
    arg_r1, arg_r2, arg_r3, arg_r4 = (
        lookup(LITERAL_PREFIX_INFERENCE_RULE + str(i)) for i in range(1, 5))
    arg_u3 = lookup(DUMMY_LITERAL_NAME_OPINION + '_' + '3')
    arg_u4 = lookup(DUMMY_LITERAL_NAME_OPINION + '_' + '4')
    arg_uneg5 = lookup(DUMMY_LITERAL_NAME_OPINION + '_' + LITERAL_PREFIX_NOT + '5')

    def mutual(first, second):
        # Symmetric definite attack between two arguments.
        af_reference.set_attack(first, second, AF.DEFINITE_ATTACK)
        af_reference.set_attack(second, first, AF.DEFINITE_ATTACK)

    # Mutual attacks between each statement and its negation.
    for positive, negated in statement_pairs:
        mutual(positive, negated)
    # Undermining: a negated premise attacks the rule relying on it.
    for negated_premise, rule in ((arg_neg2, arg_r1), (arg_neg3, arg_r2),
                                  (arg_neg4, arg_r3), (arg_neg5, arg_r4)):
        af_reference.set_attack(negated_premise, rule, AF.DEFINITE_ATTACK)
    # Rebutting: each rule vs. the argument opposing its conclusion.
    mutual(arg_r1, arg_neg1)
    mutual(arg_r2, arg_1)
    mutual(arg_r3, arg_2)
    # Undercutting: rule 4 and the undercut rule 2 attack each other.
    mutual(arg_r4, arg_r2)
    # Weak opinion: each opinion argument vs. the statement it opposes.
    mutual(arg_u3, arg_neg3)
    mutual(arg_u4, arg_neg4)
    mutual(arg_uneg5, arg_5)
    # Rules with conflicting conclusions rebut each other.
    mutual(arg_r1, arg_r2)
    self.assertTrue(af_reference.is_equivalent_to(af_result))
# Run the whole test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 55.132128
| 112
| 0.709284
| 5,498
| 39,640
| 4.653692
| 0.024009
| 0.101892
| 0.119831
| 0.171187
| 0.980341
| 0.977722
| 0.977722
| 0.97741
| 0.975299
| 0.975299
| 0
| 0.0273
| 0.19884
| 39,640
| 718
| 113
| 55.208914
| 0.778355
| 0.058375
| 0
| 0.878307
| 0
| 0
| 0.054942
| 0.010892
| 0
| 0
| 0
| 0
| 0.015873
| 1
| 0.015873
| false
| 0
| 0.058201
| 0
| 0.075838
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
47de4a8631301441c14c8ec0c7bed88b1d9a0590
| 4,988
|
py
|
Python
|
tests/_display/test_line_alpha_interface.py
|
ynsnf/apysc
|
b10ffaf76ec6beb187477d0a744fca00e3efc3fb
|
[
"MIT"
] | 16
|
2021-04-16T02:01:29.000Z
|
2022-01-01T08:53:49.000Z
|
tests/_display/test_line_alpha_interface.py
|
ynsnf/apysc
|
b10ffaf76ec6beb187477d0a744fca00e3efc3fb
|
[
"MIT"
] | 613
|
2021-03-24T03:37:38.000Z
|
2022-03-26T10:58:37.000Z
|
tests/_display/test_line_alpha_interface.py
|
simon-ritchie/apyscript
|
c319f8ab2f1f5f7fad8d2a8b4fc06e7195476279
|
[
"MIT"
] | 2
|
2021-06-20T07:32:58.000Z
|
2021-12-26T08:22:11.000Z
|
import re
from random import randint
from typing import Match
from typing import Optional
from retrying import retry
import apysc as ap
from apysc._display.line_alpha_interface import LineAlphaInterface
from apysc._expression import expression_data_util
from apysc._expression import var_names
class TestLineAlphaInterface:
    """Unit tests for the ``LineAlphaInterface`` mix-in."""

    @retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
    def test_line_alpha(self) -> None:
        """The line_alpha setter stores the value the getter returns."""
        iface: LineAlphaInterface = LineAlphaInterface()
        iface.variable_name = 'test_line_alpha_interface'
        iface.line_alpha = ap.Number(0.3)
        assert iface.line_alpha == 0.3

    @retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
    def test__append_line_alpha_update_expression(self) -> None:
        """Setting line_alpha appends a stroke-opacity expression."""
        iface: LineAlphaInterface = LineAlphaInterface()
        iface.variable_name = 'test_line_alpha_interface'
        expression_data_util.empty_expression()
        iface.line_alpha = ap.Number(0.5)
        expression: str = expression_data_util.get_current_expression()
        pattern: str = (
            r'test_line_alpha_interface\.stroke'
            rf'\({{opacity: {var_names.NUMBER}_.+?}}\);'
        )
        match: Optional[Match] = re.search(
            pattern=pattern, string=expression, flags=re.MULTILINE)
        assert match is not None

    @retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
    def test__update_line_alpha_and_skip_appending_exp(self) -> None:
        """The skip-variant updates the value without emitting JS."""
        iface: LineAlphaInterface = LineAlphaInterface()
        iface.variable_name = 'test_line_alpha_interface'
        expression_data_util.empty_expression()
        iface._update_line_alpha_and_skip_appending_exp(
            value=ap.Number(0.25))
        assert iface.line_alpha == 0.25
        expression: str = expression_data_util.get_current_expression()
        assert 'stroke-opacity' not in expression
        # A plain float is accepted as well as an ap.Number.
        iface._update_line_alpha_and_skip_appending_exp(value=0.3)
        assert iface.line_alpha == 0.3

    @retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
    def test__initialize_line_alpha_if_not_initialized(self) -> None:
        """Initialization defaults to 1.0 and keeps an existing value."""
        iface: LineAlphaInterface = LineAlphaInterface()
        iface.variable_name = 'test_line_alpha_interface'
        iface._initialize_line_alpha_if_not_initialized()
        assert iface.line_alpha == 1.0
        iface.line_alpha = ap.Number(0.5)
        iface._initialize_line_alpha_if_not_initialized()
        assert iface.line_alpha == 0.5

    @retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
    def test__make_snapshot(self) -> None:
        """A snapshot records the alpha once per snapshot name."""
        iface: LineAlphaInterface = LineAlphaInterface()
        iface.variable_name = 'test_line_alpha_interface'
        iface.line_alpha = ap.Number(0.5)
        snapshot_name: str = 'snapshot_1'
        iface._run_all_make_snapshot_methods(snapshot_name=snapshot_name)
        assert iface._line_alpha_snapshots[snapshot_name] == 0.5
        # Taking the same snapshot again must not overwrite the value.
        iface.line_alpha = ap.Number(0.3)
        iface._run_all_make_snapshot_methods(snapshot_name=snapshot_name)
        assert iface._line_alpha_snapshots[snapshot_name] == 0.5

    @retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
    def test__revert(self) -> None:
        """Revert restores the snapshot value; a second revert is a no-op."""
        iface: LineAlphaInterface = LineAlphaInterface()
        iface.variable_name = 'test_line_alpha_interface'
        iface.line_alpha = ap.Number(0.5)
        snapshot_name: str = 'snapshot_1'
        iface._run_all_make_snapshot_methods(snapshot_name=snapshot_name)
        iface.line_alpha = ap.Number(0.3)
        iface._run_all_revert_methods(snapshot_name=snapshot_name)
        assert iface.line_alpha == 0.5
        # With the snapshot consumed, the value stays as just assigned.
        iface.line_alpha = ap.Number(0.3)
        iface._run_all_revert_methods(snapshot_name=snapshot_name)
        assert iface.line_alpha == 0.3

    @retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
    def test__append_line_alpha_attr_linking_setting(self) -> None:
        """Initialization pushes the default onto the attr-linking stack."""
        iface: LineAlphaInterface = LineAlphaInterface()
        iface.variable_name = 'test_line_alpha_interface'
        iface._initialize_line_alpha_if_not_initialized()
        assert iface._attr_linking_stack['line_alpha'] == [ap.Number(1.0)]
| 47.056604
| 78
| 0.719326
| 614
| 4,988
| 5.371336
| 0.135179
| 0.20467
| 0.256519
| 0.140085
| 0.815343
| 0.815343
| 0.80473
| 0.771073
| 0.724985
| 0.708005
| 0
| 0.025259
| 0.206295
| 4,988
| 105
| 79
| 47.504762
| 0.80778
| 0
| 0
| 0.622222
| 0
| 0
| 0.059799
| 0.048126
| 0
| 0
| 0
| 0
| 0.133333
| 1
| 0.077778
| false
| 0
| 0.1
| 0
| 0.188889
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
47fd296ad1a6b8222bf15984c6ce1eda27244b40
| 332,918
|
py
|
Python
|
src/USGS_CatalogUpdater.py
|
UCHIC/webtsa-utilities
|
c6c3e8d68d0f2ddac9c60e269293008a57b54d70
|
[
"BSD-3-Clause"
] | 2
|
2017-12-22T00:29:10.000Z
|
2017-12-22T11:13:31.000Z
|
src/USGS_CatalogUpdater.py
|
UCHIC/webtsa-utilities
|
c6c3e8d68d0f2ddac9c60e269293008a57b54d70
|
[
"BSD-3-Clause"
] | 5
|
2017-12-22T17:33:08.000Z
|
2021-12-13T19:46:14.000Z
|
src/USGS_CatalogUpdater.py
|
UCHIC/webtsa-utilities
|
c6c3e8d68d0f2ddac9c60e269293008a57b54d70
|
[
"BSD-3-Clause"
] | 1
|
2019-06-12T23:48:40.000Z
|
2019-06-12T23:48:40.000Z
|
import datetime
import pandas
import re
import requests
import sys
import urllib
from influxdb import DataFrameClient
from influxdb.exceptions import InfluxDBClientError
from Tools.WaterMLParser import *
from InfluxHelper import *
from SqlSnippets import *
usgs_sites_insert_query = """
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10163000 ', 'PROVO RIVER AT PROVO, UT ', 40.2377317999999988, -111.699370000000002, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10163000&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIVER_20AT_20PROVO_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_PROVO_20RIVER_20AT_20PROVO_2C_20UT_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10172200 ', 'RED BUTTE CREEK AT FORT DOUGLAS, NEAR SLC, UT ', 40.7799462700000035, -111.806044999999997, 'Utah ', NULL, 'Stream', '10', 'Temperature, water, degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'deg C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1964-04-15 00:00:00', '2014-05-13 00:00:00', -7, 7035, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10172200&startDT=1964-04-15¶meterCd=00010&statCd=00001', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_10_5_00001%%22', NULL, 'usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_10_5_00001', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10172200 ', 'RED BUTTE CREEK AT FORT DOUGLAS, NEAR SLC, UT ', 40.7799462700000035, -111.806044999999997, 'Utah ', NULL, 'Stream', '10', 'Temperature, water, degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'deg C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1964-04-15 00:00:00', '2014-05-13 00:00:00', -7, 7065, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10172200&startDT=1964-04-15¶meterCd=00010&statCd=00002', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_10_5_00002%%22', NULL, 'usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_10_5_00002', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10172200 ', 'RED BUTTE CREEK AT FORT DOUGLAS, NEAR SLC, UT ', 40.7799462700000035, -111.806044999999997, 'Utah ', NULL, 'Stream', '10', 'Temperature, water, degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'deg C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1998-10-22 00:00:00', '2014-05-13 00:00:00', -7, 2222, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10172200&startDT=1998-10-22¶meterCd=00010&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_10_5_00003%%22', NULL, 'usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_10_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10168000 ', 'LITTLE COTTONWOOD CREEK @ JORDAN RIVER NR SLC ', 40.6638360000000034, -111.901880199999994, 'Utah ', NULL, 'Stream', '10', 'Temperature, water, degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'deg C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1998-10-01 00:00:00', '2002-09-30 00:00:00', -7, 1432, '2002-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10168000&startDT=1998-10-01¶meterCd=00010&statCd=00001', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_10_5_00001%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_10_5_00001', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10168000 ', 'LITTLE COTTONWOOD CREEK @ JORDAN RIVER NR SLC ', 40.6638360000000034, -111.901880199999994, 'Utah ', NULL, 'Stream', '10', 'Temperature, water, degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'deg C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1998-10-01 00:00:00', '2002-09-30 00:00:00', -7, 1432, '2002-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10168000&startDT=1998-10-01¶meterCd=00010&statCd=00002', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_10_5_00002%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_10_5_00002', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10168000 ', 'LITTLE COTTONWOOD CREEK @ JORDAN RIVER NR SLC ', 40.6638360000000034, -111.901880199999994, 'Utah ', NULL, 'Stream', '10', 'Temperature, water, degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'deg C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1998-10-01 00:00:00', '2002-09-30 00:00:00', -7, 1439, '2002-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10168000&startDT=1998-10-01¶meterCd=00010&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_10_5_00003%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_10_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10113500 ', 'BLACKSMITH FORK AB UP and L CO.''S DAM NR HYRUM, UT ', 41.6235452000000024, -111.738828600000005, 'Utah ', NULL, 'Stream', '45', 'Precipitation, total, inches', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'in', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2001-10-01 00:00:00', '2014-05-13 00:00:00', -7, 4481, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10113500&startDT=2001-10-01¶meterCd=00045&statCd=00006', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_BLACKSMITH_20FORK_20AB_20UP_20and_20L_20CO__27S_20DAM_20NR_20HYRUM_2C_20UT_20_45_5_00006%%22', NULL, 'usgs_BLACKSMITH_20FORK_20AB_20UP_20and_20L_20CO__27S_20DAM_20NR_20HYRUM_2C_20UT_20_45_5_00006', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10171000 ', 'JORDAN RIVER @ 1700 SOUTH @ SALT LAKE CITY, UT ', 40.7335573699999998, -111.923270299999999, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1942-12-01 00:00:00', '2014-05-13 00:00:00', -7, 26097, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10171000&startDT=1942-12-01¶meterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_JORDAN_20RIVER_20_40_201700_20SOUTH_20_40_20SALT_20LAKE_20CITY_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_JORDAN_20RIVER_20_40_201700_20SOUTH_20_40_20SALT_20LAKE_20CITY_2C_20UT_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10172200 ', 'RED BUTTE CREEK AT FORT DOUGLAS, NEAR SLC, UT ', 40.7799462700000035, -111.806044999999997, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1963-10-01 00:00:00', '2014-05-13 00:00:00', -7, 18483, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10172200&startDT=1963-10-01¶meterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10172371 ', 'SO CONDUIT OF 8TH SO CONDUITS @ JORDAN RIV @ SLC ', 40.7543904799999979, -111.922437000000002, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1980-06-01 00:00:00', '1981-09-30 00:00:00', -7, 386, '1981-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10172371&startDT=1980-06-01¶meterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_SO_20CONDUIT_20OF_208TH_20SO_20CONDUITS_20_40_20JORDAN_20RIV_20_40_20SLC_20_60_5_00003%%22', NULL, 'usgs_SO_20CONDUIT_20OF_208TH_20SO_20CONDUITS_20_40_20JORDAN_20RIV_20_40_20SLC_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10172630 ', 'GOGGIN DRAIN NEAR MAGNA UTAH ', 40.8166110700000004, -112.100776699999997, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1963-10-01 00:00:00', '2014-05-13 00:00:00', -7, 9140, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10172630&startDT=1963-10-01¶meterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_GOGGIN_20DRAIN_20NEAR_20MAGNA_20UTAH_20_60_5_00003%%22', NULL, 'usgs_GOGGIN_20DRAIN_20NEAR_20MAGNA_20UTAH_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10105900 ', 'LITTLE BEAR RIVER AT PARADISE, UT ', 41.5754897999999997, -111.855220299999999, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1991-10-01 00:00:00', '2014-05-13 00:00:00', -7, 7897, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10105900&startDT=1991-10-01¶meterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20BEAR_20RIVER_20AT_20PARADISE_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_LITTLE_20BEAR_20RIVER_20AT_20PARADISE_2C_20UT_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10108400 ', 'CACHE HIGHLINE CANAL NEAR LOGAN, UTAH ', 41.742986860000002, -111.761886000000004, 'Utah ', NULL, 'Canal', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1963-05-01 00:00:00', '2014-05-13 00:00:00', -7, 18641, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10108400&startDT=1963-05-01¶meterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_CACHE_20HIGHLINE_20CANAL_20NEAR_20LOGAN_2C_20UTAH_20_60_5_00003%%22', NULL, 'usgs_CACHE_20HIGHLINE_20CANAL_20NEAR_20LOGAN_2C_20UTAH_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10168000 ', 'LITTLE COTTONWOOD CREEK @ JORDAN RIVER NR SLC ', 40.6638360000000034, -111.901880199999994, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1980-02-20 00:00:00', '2014-05-13 00:00:00', -7, 9870, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10168000&startDT=1980-02-20¶meterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_60_5_00003%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10170490 ', 'COM FLW JORDAN RIVER & SURPLUS CANAL @ SLC, UT ', 40.7268907600000034, -111.926603700000001, 'Utah ', NULL, 'Canal', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1942-12-01 00:00:00', '2013-09-30 00:00:00', -7, 25872, '2013-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10170490&startDT=1942-12-01¶meterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_COM_20FLW_20JORDAN_20RIVER_20_26_20SURPLUS_20CANAL_20_40_20SLC_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_COM_20FLW_20JORDAN_20RIVER_20_26_20SURPLUS_20CANAL_20_40_20SLC_2C_20UT_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10170500 ', 'SURPLUS CANAL @ SALT LAKE CITY, UT ', 40.7268907600000034, -111.926603700000001, 'Utah ', NULL, 'Canal', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1942-12-01 00:00:00', '2014-05-13 00:00:00', -7, 26094, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10170500&startDT=1942-12-01¶meterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_SURPLUS_20CANAL_20_40_20SALT_20LAKE_20CITY_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_SURPLUS_20CANAL_20_40_20SALT_20LAKE_20CITY_2C_20UT_20_60_5_00003', -999999.0);
-- Seed rows for "DataSeries" (USGS Daily network, source service 5).
-- FIX: the GetDataURL values previously contained "¶meterCd" — the literal
-- "&para" of "&parameterCd" had been collapsed into the pilcrow HTML entity
-- during export, breaking the USGS NWIS query string. Restored to "&parameterCd".
-- NOTE(review): the "%%" sequences in GetDataInflux look like percent-escaping
-- for a driver/format layer (yielding "%20"/"%22" at runtime) — left as-is; confirm
-- against the consumer before changing.
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10155500 ', 'PROVO RIVER NEAR CHARLESTON, UT ', 40.4841221000000004, -111.4635198, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1938-10-01 00:00:00', '2014-05-13 00:00:00', -7, 12644, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10155500&startDT=1938-10-01&parameterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIVER_20NEAR_20CHARLESTON_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_PROVO_20RIVER_20NEAR_20CHARLESTON_2C_20UT_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10156000 ', 'SNAKE CREEK NEAR CHARLESTON, UT ', 40.4852332000000033, -111.467131199999997, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1938-10-01 00:00:00', '2014-05-13 00:00:00', -7, 12081, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10156000&startDT=1938-10-01&parameterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_SNAKE_20CREEK_20NEAR_20CHARLESTON_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_SNAKE_20CREEK_20NEAR_20CHARLESTON_2C_20UT_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10157500 ', 'DANIELS CREEK AT CHARLESTON, UT ', 40.4607890899999987, -111.472686999999993, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1993-05-01 00:00:00', '2014-05-13 00:00:00', -7, 7683, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10157500&startDT=1993-05-01&parameterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_DANIELS_20CREEK_20AT_20CHARLESTON_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_DANIELS_20CREEK_20AT_20CHARLESTON_2C_20UT_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10163000 ', 'PROVO RIVER AT PROVO, UT ', 40.2377317999999988, -111.699370000000002, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2003-10-01 00:00:00', '2014-05-13 00:00:00', -7, 29086, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10163000&startDT=2003-10-01&parameterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIVER_20AT_20PROVO_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_PROVO_20RIVER_20AT_20PROVO_2C_20UT_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10167170 ', 'BUTTERFIELD CREEK NEAR LARK, UTAH ', 40.5077777799999978, -112.107500000000002, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2009-10-01 00:00:00', '2014-05-13 00:00:00', -7, 1686, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10167170&startDT=2009-10-01&parameterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_BUTTERFIELD_20CREEK_20NEAR_20LARK_2C_20UTAH_20_60_5_00003%%22', NULL, 'usgs_BUTTERFIELD_20CREEK_20NEAR_20LARK_2C_20UTAH_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10167800 ', 'LITTLE COTTONWOOD CREEK @ CRESTWOOD PARK @ SLC ', 40.6143923799999982, -111.842989200000005, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1987-10-01 00:00:00', '2001-09-30 00:00:00', -7, 2451, '2001-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10167800&startDT=1987-10-01&parameterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20CRESTWOOD_20PARK_20_40_20SLC_20_60_5_00003%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20CRESTWOOD_20PARK_20_40_20SLC_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10113500 ', 'BLACKSMITH FORK AB UP and L CO.''S DAM NR HYRUM, UT ', 41.6235452000000024, -111.738828600000005, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2013-11-28 00:00:00', '2014-05-13 00:00:00', -7, 35262, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10113500&startDT=2013-11-28&parameterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_BLACKSMITH_20FORK_20AB_20UP_20and_20L_20CO__27S_20DAM_20NR_20HYRUM_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_BLACKSMITH_20FORK_20AB_20UP_20and_20L_20CO__27S_20DAM_20NR_20HYRUM_2C_20UT_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10154200 ', 'PROVO RIVER NEAR WOODLAND, UT ', 40.5577278000000021, -111.1687838, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1963-07-12 00:00:00', '2014-05-13 00:00:00', -7, 18569, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10154200&startDT=1963-07-12&parameterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIVER_20NEAR_20WOODLAND_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_PROVO_20RIVER_20NEAR_20WOODLAND_2C_20UT_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10155000 ', 'PROVO RIVER NEAR HAILSTONE, UT ', 40.6007855499999977, -111.331571999999994, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1949-10-01 00:00:00', '2014-05-13 00:00:00', -7, 23601, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10155000&startDT=1949-10-01&parameterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIVER_20NEAR_20HAILSTONE_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_PROVO_20RIVER_20NEAR_20HAILSTONE_2C_20UT_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10155200 ', 'PROVO RIV AT RIV ROAD BRIDGE NR HEBER CITY, UT ', 40.5543980500000032, -111.4332426, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2001-09-08 00:00:00', '2014-05-13 00:00:00', -7, 4631, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10155200&startDT=2001-09-08&parameterCd=00060&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIV_20AT_20RIV_20ROAD_20BRIDGE_20NR_20HEBER_20CITY_2C_20UT_20_60_5_00003%%22', NULL, 'usgs_PROVO_20RIV_20AT_20RIV_20ROAD_20BRIDGE_20NR_20HEBER_20CITY_2C_20UT_20_60_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10105900 ', 'LITTLE BEAR RIVER AT PARADISE, UT ', 41.5754897999999997, -111.855220299999999, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1992-10-01 00:00:00', '2005-09-30 00:00:00', -7, 4689, '2005-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10105900&startDT=1992-10-01&parameterCd=00065&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20BEAR_20RIVER_20AT_20PARADISE_2C_20UT_20_65_5_00003%%22', NULL, 'usgs_LITTLE_20BEAR_20RIVER_20AT_20PARADISE_2C_20UT_20_65_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10113500 ', 'BLACKSMITH FORK AB UP and L CO.''S DAM NR HYRUM, UT ', 41.6235452000000024, -111.738828600000005, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1994-10-01 00:00:00', '2012-10-10 00:00:00', -7, 5188, '2012-10-10 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10113500&startDT=1994-10-01&parameterCd=00065&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_BLACKSMITH_20FORK_20AB_20UP_20and_20L_20CO__27S_20DAM_20NR_20HYRUM_2C_20UT_20_65_5_00003%%22', NULL, 'usgs_BLACKSMITH_20FORK_20AB_20UP_20and_20L_20CO__27S_20DAM_20NR_20HYRUM_2C_20UT_20_65_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10155200 ', 'PROVO RIV AT RIV ROAD BRIDGE NR HEBER CITY, UT ', 40.5543980500000032, -111.4332426, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2001-09-08 00:00:00', '2014-05-13 00:00:00', -7, 4575, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10155200&startDT=2001-09-08&parameterCd=00065&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIV_20AT_20RIV_20ROAD_20BRIDGE_20NR_20HEBER_20CITY_2C_20UT_20_65_5_00003%%22', NULL, 'usgs_PROVO_20RIV_20AT_20RIV_20ROAD_20BRIDGE_20NR_20HEBER_20CITY_2C_20UT_20_65_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10156000 ', 'SNAKE CREEK NEAR CHARLESTON, UT ', 40.4852332000000033, -111.467131199999997, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1993-04-16 00:00:00', '2014-05-13 00:00:00', -7, 7614, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10156000&startDT=1993-04-16&parameterCd=00065&statCd=00001', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_SNAKE_20CREEK_20NEAR_20CHARLESTON_2C_20UT_20_65_5_00001%%22', NULL, 'usgs_SNAKE_20CREEK_20NEAR_20CHARLESTON_2C_20UT_20_65_5_00001', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10156000 ', 'SNAKE CREEK NEAR CHARLESTON, UT ', 40.4852332000000033, -111.467131199999997, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1992-10-01 00:00:00', '2014-05-13 00:00:00', -7, 7618, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10156000&startDT=1992-10-01&parameterCd=00065&statCd=00002', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_SNAKE_20CREEK_20NEAR_20CHARLESTON_2C_20UT_20_65_5_00002%%22', NULL, 'usgs_SNAKE_20CREEK_20NEAR_20CHARLESTON_2C_20UT_20_65_5_00002', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10156000 ', 'SNAKE CREEK NEAR CHARLESTON, UT ', 40.4852332000000033, -111.467131199999997, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1992-10-01 00:00:00', '2014-05-13 00:00:00', -7, 7618, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10156000&startDT=1992-10-01&parameterCd=00065&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_SNAKE_20CREEK_20NEAR_20CHARLESTON_2C_20UT_20_65_5_00003%%22', NULL, 'usgs_SNAKE_20CREEK_20NEAR_20CHARLESTON_2C_20UT_20_65_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10157500 ', 'DANIELS CREEK AT CHARLESTON, UT ', 40.4607890899999987, -111.472686999999993, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1993-04-24 00:00:00', '2014-05-13 00:00:00', -7, 6655, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10157500&startDT=1993-04-24&parameterCd=00065&statCd=00001', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_DANIELS_20CREEK_20AT_20CHARLESTON_2C_20UT_20_65_5_00001%%22', NULL, 'usgs_DANIELS_20CREEK_20AT_20CHARLESTON_2C_20UT_20_65_5_00001', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10157500 ', 'DANIELS CREEK AT CHARLESTON, UT ', 40.4607890899999987, -111.472686999999993, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1992-10-01 00:00:00', '2014-05-13 00:00:00', -7, 6659, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10157500&startDT=1992-10-01&parameterCd=00065&statCd=00002', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_DANIELS_20CREEK_20AT_20CHARLESTON_2C_20UT_20_65_5_00002%%22', NULL, 'usgs_DANIELS_20CREEK_20AT_20CHARLESTON_2C_20UT_20_65_5_00002', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10157500 ', 'DANIELS CREEK AT CHARLESTON, UT ', 40.4607890899999987, -111.472686999999993, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1992-10-01 00:00:00', '2014-05-13 00:00:00', -7, 6679, '2014-05-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10157500&startDT=1992-10-01&parameterCd=00065&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_DANIELS_20CREEK_20AT_20CHARLESTON_2C_20UT_20_65_5_00003%%22', NULL, 'usgs_DANIELS_20CREEK_20AT_20CHARLESTON_2C_20UT_20_65_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10172200 ', 'RED BUTTE CREEK AT FORT DOUGLAS, NEAR SLC, UT ', 40.7799462700000035, -111.806044999999997, 'Utah ', NULL, 'Stream', '95', 'Specific conductance, water, unfiltered, microsiemens per centimeter at 25 degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'uS/cm @25C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-10-01 00:00:00', '2002-09-30 00:00:00', -7, 969, '2002-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10172200&startDT=1999-10-01&parameterCd=00095&statCd=00001', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_95_5_00001%%22', NULL, 'usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_95_5_00001', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10172200 ', 'RED BUTTE CREEK AT FORT DOUGLAS, NEAR SLC, UT ', 40.7799462700000035, -111.806044999999997, 'Utah ', NULL, 'Stream', '95', 'Specific conductance, water, unfiltered, microsiemens per centimeter at 25 degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'uS/cm @25C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-10-01 00:00:00', '2002-09-30 00:00:00', -7, 969, '2002-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10172200&startDT=1999-10-01&parameterCd=00095&statCd=00002', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_95_5_00002%%22', NULL, 'usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_95_5_00002', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10172200 ', 'RED BUTTE CREEK AT FORT DOUGLAS, NEAR SLC, UT ', 40.7799462700000035, -111.806044999999997, 'Utah ', NULL, 'Stream', '95', 'Specific conductance, water, unfiltered, microsiemens per centimeter at 25 degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'uS/cm @25C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-10-01 00:00:00', '2002-09-30 00:00:00', -7, 969, '2002-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10172200&startDT=1999-10-01&parameterCd=00095&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_95_5_00003%%22', NULL, 'usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_95_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10167800 ', 'LITTLE COTTONWOOD CREEK @ CRESTWOOD PARK @ SLC ', 40.6143923799999982, -111.842989200000005, 'Utah ', NULL, 'Stream', '95', 'Specific conductance, water, unfiltered, microsiemens per centimeter at 25 degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'uS/cm @25C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-02-18 00:00:00', '2000-09-30 00:00:00', -7, 495, '2000-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10167800&startDT=1999-02-18&parameterCd=00095&statCd=00001', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20CRESTWOOD_20PARK_20_40_20SLC_20_95_5_00001%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20CRESTWOOD_20PARK_20_40_20SLC_20_95_5_00001', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403918111584201 ', ' (C- 2- 1) 8cdc- 1 ', 40.6549388899999968, -111.978255599999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-10-18 00:00:00', '2013-10-31 00:00:00', NULL, 28, '2013-10-31 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403918111584201&startDT=1999-10-18T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_29_208cdc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_29_208cdc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10167800 ', 'LITTLE COTTONWOOD CREEK @ CRESTWOOD PARK @ SLC ', 40.6143923799999982, -111.842989200000005, 'Utah ', NULL, 'Stream', '95', 'Specific conductance, water, unfiltered, microsiemens per centimeter at 25 degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'uS/cm @25C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-02-18 00:00:00', '2000-09-30 00:00:00', -7, 495, '2000-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10167800&startDT=1999-02-18¶meterCd=00095&statCd=00002', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20CRESTWOOD_20PARK_20_40_20SLC_20_95_5_00002%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20CRESTWOOD_20PARK_20_40_20SLC_20_95_5_00002', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10167800 ', 'LITTLE COTTONWOOD CREEK @ CRESTWOOD PARK @ SLC ', 40.6143923799999982, -111.842989200000005, 'Utah ', NULL, 'Stream', '95', 'Specific conductance, water, unfiltered, microsiemens per centimeter at 25 degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'uS/cm @25C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-02-18 00:00:00', '2000-09-30 00:00:00', -7, 495, '2000-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10167800&startDT=1999-02-18¶meterCd=00095&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20CRESTWOOD_20PARK_20_40_20SLC_20_95_5_00003%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20CRESTWOOD_20PARK_20_40_20SLC_20_95_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10168000 ', 'LITTLE COTTONWOOD CREEK @ JORDAN RIVER NR SLC ', 40.6638360000000034, -111.901880199999994, 'Utah ', NULL, 'Stream', '95', 'Specific conductance, water, unfiltered, microsiemens per centimeter at 25 degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'uS/cm @25C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1998-10-01 00:00:00', '2002-09-30 00:00:00', -7, 1397, '2002-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10168000&startDT=1998-10-01¶meterCd=00095&statCd=00001', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_95_5_00001%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_95_5_00001', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10168000 ', 'LITTLE COTTONWOOD CREEK @ JORDAN RIVER NR SLC ', 40.6638360000000034, -111.901880199999994, 'Utah ', NULL, 'Stream', '95', 'Specific conductance, water, unfiltered, microsiemens per centimeter at 25 degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'uS/cm @25C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1998-10-01 00:00:00', '2002-09-30 00:00:00', -7, 1393, '2002-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10168000&startDT=1998-10-01¶meterCd=00095&statCd=00002', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_95_5_00002%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_95_5_00002', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403919111502601 ', ' (D- 2- 1) 9dca- 1 ', 40.65522524, -111.8413228, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1981-11-03 00:00:00', '2014-02-05 00:00:00', NULL, 62, '2014-02-05 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403919111502601&startDT=1981-11-03T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_29_209dca__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_29_209dca__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (5, 'USGS Daily', '10168000 ', 'LITTLE COTTONWOOD CREEK @ JORDAN RIVER NR SLC ', 40.6638360000000034, -111.901880199999994, 'Utah ', NULL, 'Stream', '95', 'Specific conductance, water, unfiltered, microsiemens per centimeter at 25 degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'uS/cm @25C', 'Water', 'Field Observation ', NULL, 'Physical ', 1, 'day', 'Time', 'day', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1998-10-01 00:00:00', '2002-09-30 00:00:00', -7, 1402, '2002-09-30 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/dv/?format=waterml,1.1&sites=10168000&startDT=1998-10-01¶meterCd=00095&statCd=00003', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_95_5_00003%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_95_5_00003', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404710111551301 ', ' (B- 1- 1)26cda- 1 ', 40.7860568799999967, -111.921048600000006, 'Utah ', NULL, 'Test hole', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1982-08-19 00:00:00', '2014-02-04 00:00:00', NULL, 171, '2014-02-04 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404710111551301&startDT=1982-08-19T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28B__201__201_2926cda__201_20_72019_6_72019%%22', NULL, 'usgs__20_28B__201__201_2926cda__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '401654111400101 ', ' (D- 6- 2)24caa- 1 S25 ', 40.2817320600000031, -111.667728800000006, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1981-03-25 00:00:00', '2012-09-26 00:00:00', NULL, 30, '2012-09-26 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=401654111400101&startDT=1981-03-25T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__206__202_2924caa__201_20S25_20_72019_6_72019%%22', NULL, 'usgs__20_28D__206__202_2924caa__201_20S25_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '402750111232701 ', ' (D- 4- 5)16ccd- 1 ', 40.4638439900000009, -111.391570299999998, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1988-10-17 00:00:00', '2013-08-28 00:00:00', NULL, 146, '2013-08-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=402750111232701&startDT=1988-10-17T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__204__205_2916ccd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__204__205_2916ccd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '402810111263601 ', ' (D- 4- 4)13bdd- 1 ', 40.4693998899999983, -111.444074000000001, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2003-02-24 00:00:00', '2013-08-27 00:00:00', NULL, 32, '2013-08-27 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=402810111263601&startDT=2003-02-24T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__204__204_2913bdd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__204__204_2913bdd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '402840111232201 ', ' (D- 4- 5)16bab- 1 ', 40.4777326999999971, -111.390181299999995, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1997-02-13 00:00:00', '2005-09-15 00:00:00', NULL, 94, '2005-09-15 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=402840111232201&startDT=1997-02-13T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__204__205_2916bab__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__204__205_2916bab__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414649111560301 ', ' (B-12- 1)15adc- 4 ', 41.7802777799999987, -111.934166700000006, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2005-06-13 00:00:00', '2014-04-16 00:00:00', NULL, 56, '2014-04-16 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414649111560301&startDT=2005-06-13T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28B_12__201_2915adc__204_20_72019_6_72019%%22', NULL, 'usgs__20_28B_12__201_2915adc__204_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '402902111282001 ', ' (D- 4- 4)10daa- 1 ', 40.4838444000000024, -111.472964899999994, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1988-11-30 00:00:00', '2014-03-13 00:00:00', NULL, 160, '2014-03-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=402902111282001&startDT=1988-11-30T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__204__204_2910daa__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__204__204_2910daa__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '402937111214901 ', ' (D- 4- 5) 3dcc- 1 ', 40.4935656999999978, -111.364346100000006, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1988-07-06 00:00:00', '2013-08-28 00:00:00', NULL, 156, '2013-08-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=402937111214901&startDT=1988-07-06T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__204__205_29_203dcc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__204__205_29_203dcc__201_20_72019_6_72019', -999999.0);
-- DataSeries seed rows: USGS Groundwater wells (SourceDataServiceID 6),
-- variable 72019 "Depth to water level, feet below land surface".
-- Generated dump; padded string values (e.g. 'Utah ', 'Common ') come from a
-- fixed-width source and are preserved byte-for-byte — do not trim them.
-- FIX: each GetDataURL contained the pilcrow sequence "¶meterCd=72019", an
-- HTML-entity mangling of "&para" inside "&parameterCd"; restored to
-- "&parameterCd=72019" in every row.
-- NOTE(review): the startDT time component "T00:0" looks truncated in the
-- source dump (not a valid ISO time) — confirm against the original NWIS URLs.
-- NOTE(review): "%%" in GetDataInflux suggests these rows pass through a
-- %-style formatter (e.g. Python DB-API) where "%%" escapes to "%"; left as-is.
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '402952111591801 ', ' (C- 4- 1) 6dad- 2 ', 40.4977749999999972, -111.987799999999993, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1985-03-26 00:00:00', '2014-02-06 00:00:00', NULL, 12, '2014-02-06 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=402952111591801&startDT=1985-03-26T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__204__201_29_206dad__202_20_72019_6_72019%%22', NULL, 'usgs__20_28C__204__201_29_206dad__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403003111255801 ', ' (D- 4- 5) 6bcc- 2 ', 40.5007883600000014, -111.433517699999996, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1993-08-05 00:00:00', '2013-08-27 00:00:00', NULL, 132, '2013-08-27 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403003111255801&startDT=1993-08-05T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__204__205_29_206bcc__202_20_72019_6_72019%%22', NULL, 'usgs__20_28D__204__205_29_206bcc__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403007111523501 ', ' (D- 4- 1) 6adc- 1 ', 40.5012157999999971, -111.875242, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1979-10-01 00:00:00', '2014-02-06 00:00:00', NULL, 12, '2014-02-06 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403007111523501&startDT=1979-10-01T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__204__201_29_206adc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__204__201_29_206adc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403020111561901 ', ' (C- 4- 1) 3bad- 1 ', 40.5055043600000033, -111.93938, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1981-09-22 00:00:00', '2014-02-06 00:00:00', NULL, 44, '2014-02-06 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403020111561901&startDT=1981-09-22T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__204__201_29_203bad__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__204__201_29_203bad__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '402842111223601 ', ' (D- 4- 5) 4ddd- 1 ', 40.4930102399999967, -111.376569200000006, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1960-11-30 00:00:00', '2014-03-13 00:00:00', NULL, 203, '2014-03-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=402842111223601&startDT=1960-11-30T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__204__205_29_204ddd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__204__205_29_204ddd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '402842111263101 ', ' (D- 4- 4)12dcc- 1 ', 40.4782886799999986, -111.442684999999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1949-09-03 00:00:00', '2013-08-27 00:00:00', NULL, 160, '2013-08-27 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=402842111263101&startDT=1949-09-03T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__204__204_2912dcc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__204__204_2912dcc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403042111552501 ', ' (C- 3- 1)35cac- 1 ', 40.5116987999999978, -111.923843500000004, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1978-01-05 00:00:00', '2014-02-06 00:00:00', NULL, 11, '2014-02-06 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403042111552501&startDT=1978-01-05T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__201_2935cac__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__201_2935cac__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403055112060401 ', ' (C- 3- 2)31add- 1 ', 40.5152246999999974, -112.101884699999999, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-07-27 00:00:00', '2012-09-24 00:00:00', NULL, 61, '2012-09-24 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403055112060401&startDT=2000-07-27T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__202_2931add__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__202_2931add__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403055112060402 ', ' (C- 3- 2)31add- 2 ', 40.5152246999999974, -112.101884699999999, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-07-27 00:00:00', '2012-09-24 00:00:00', NULL, 54, '2012-09-24 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403055112060402&startDT=2000-07-27T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__202_2931add__202_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__202_2931add__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403117111591301 ', ' (C- 3- 1)31aab- 1 ', 40.5219000000000023, -111.988602799999995, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1965-01-28 00:00:00', '2014-02-06 00:00:00', NULL, 12, '2014-02-06 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403117111591301&startDT=1965-01-28T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__201_2931aab__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__201_2931aab__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403127111240301 ', ' (D- 3- 5)29cac- 1 ', 40.5260652699999966, -111.400183400000003, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1936-10-27 00:00:00', '2014-03-13 00:00:00', NULL, 317, '2014-03-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403127111240301&startDT=1936-10-27T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__203__205_2929cac__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__203__205_2929cac__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '402946111233901 ', ' (D- 4- 5) 4ccb- 1 ', 40.4960658999999978, -111.394903799999994, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1989-04-21 00:00:00', '2013-08-28 00:00:00', NULL, 105, '2013-08-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=402946111233901&startDT=1989-04-21T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__204__205_29_204ccb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__204__205_29_204ccb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403146111272701 ', ' (D- 3- 4)26dba- 1 ', 40.5293988600000006, -111.458242999999996, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1966-07-01 00:00:00', '2013-08-27 00:00:00', NULL, 163, '2013-08-27 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403146111272701&startDT=1966-07-01T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__203__204_2926dba__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__203__204_2926dba__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403209111542701 ', ' (C- 3- 1)25bbb- 1 ', 40.5361374800000007, -111.908098800000005, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1965-02-18 00:00:00', '2012-08-24 00:00:00', NULL, 31, '2012-08-24 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403209111542701&startDT=1965-02-18T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__201_2925bbb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__201_2925bbb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403241112053301 ', ' (C- 3- 2)20bdd- 1 ', 40.544668999999999, -112.093273699999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-07-27 00:00:00', '2012-09-24 00:00:00', NULL, 56, '2012-09-24 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403241112053301&startDT=2000-07-27T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__202_2920bdd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__202_2920bdd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403241112053302 ', ' (C- 3- 2)20bdd- 2 ', 40.544668999999999, -112.093273699999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-07-27 00:00:00', '2012-09-24 00:00:00', NULL, 53, '2012-09-24 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403241112053302&startDT=2000-07-27T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__202_2920bdd__202_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__202_2920bdd__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403244111504701 ', ' (D- 3- 1)21bdc- 1 ', 40.5455416699999986, -111.846324999999993, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-10-04 00:00:00', '2013-11-01 00:00:00', NULL, 26, '2013-11-01 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403244111504701&startDT=1999-10-04T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__203__201_2921bdc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__203__201_2921bdc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403029112043401 ', ' (C- 3- 2)33cdd- 1 ', 40.5081805599999996, -112.072811000000002, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1998-07-28 00:00:00', '2014-02-06 00:00:00', NULL, 12, '2014-02-06 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403029112043401&startDT=1998-07-28T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__202_2933cdd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__202_2933cdd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403316111510601 ', ' (D- 3- 1)16ccb- 1 ', 40.5543694400000021, -111.851622199999994, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-02 00:00:00', '2013-11-01 00:00:00', NULL, 30, '2013-11-01 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403316111510601&startDT=1999-09-02T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__203__201_2916ccb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__203__201_2916ccb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403330111531601 ', ' (D- 3- 1)18cba- 1 ', 40.5582817399999982, -111.888545699999995, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1964-07-08 00:00:00', '2014-02-03 00:00:00', NULL, 1474, '2014-02-03 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403330111531601&startDT=1964-07-08T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__203__201_2918cba__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__203__201_2918cba__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403408111543201 ', ' (C- 3- 1)12ccb- 1 ', 40.5688370900000024, -111.909657499999994, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1931-09-11 00:00:00', '2014-02-03 00:00:00', NULL, 220, '2014-02-03 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403408111543201&startDT=1931-09-11T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__201_2912ccb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__201_2912ccb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403409111541501 ', ' (C- 3- 1)12cdb- 1 ', 40.5691148900000016, -111.904934999999995, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1990-11-13 00:00:00', '2014-02-03 00:00:00', NULL, 35, '2014-02-03 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403409111541501&startDT=1990-11-13T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__201_2912cdb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__201_2912cdb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403411111551001 ', ' (C- 3- 1)11cad- 1 ', 40.5696703699999972, -111.9202133, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1978-07-08 00:00:00', '2014-02-03 00:00:00', NULL, 38, '2014-02-03 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403411111551001&startDT=1978-07-08T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__201_2911cad__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__201_2911cad__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403129111510601 ', ' (D- 3- 1)28ccb- 1 ', 40.5248111000000009, -111.851658299999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-07 00:00:00', '2013-11-01 00:00:00', NULL, 31, '2013-11-01 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403129111510601&startDT=1999-09-07T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__203__201_2928ccb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__203__201_2928ccb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403420111530101 ', ' (D- 3- 1) 7cab- 1 ', 40.5723138899999967, -111.883724999999998, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-02 00:00:00', '2013-10-31 00:00:00', NULL, 31, '2013-10-31 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403420111530101&startDT=1999-09-02T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__203__201_29_207cab__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__203__201_29_207cab__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403458111534901 ', ' (C- 3- 1) 1dca- 1 ', 40.5832813999999971, -111.8979906, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1981-09-30 00:00:00', '2014-02-03 00:00:00', NULL, 35, '2014-02-03 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403458111534901&startDT=1981-09-30T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__201_29_201dca__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__201_29_201dca__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403503112011401 ', ' (C- 3- 2) 1cca- 1 ', 40.5841140699999983, -112.021327499999998, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-11-02 00:00:00', '2014-02-10 00:00:00', NULL, 15, '2014-02-10 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403503112011401&startDT=1999-11-02T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__202_29_201cca__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__202_29_201cca__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403511111541501 ', ' (C- 3- 1) 1cab- 2 ', 40.5863368999999992, -111.904935199999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1966-02-05 00:00:00', '2014-02-03 00:00:00', NULL, 281, '2014-02-03 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403511111541501&startDT=1966-02-05T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__201_29_201cab__202_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__201_29_201cab__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403524111512901 ', ' (D- 3- 1) 5aca- 1 LUS32 ', 40.5899416700000017, -111.858047200000001, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-08-05 00:00:00', '2014-01-28 00:00:00', NULL, 116, '2014-01-28 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403524111512901&startDT=1999-08-05T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__203__201_29_205aca__201_20LUS32_20_72019_6_72019%%22', NULL, 'usgs__20_28D__203__201_29_205aca__201_20LUS32_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403524111572201 ', ' (C- 3- 1) 4acb- 1 LUS33 ', 40.5900333000000018, -111.956038899999996, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-10 00:00:00', '2014-03-13 00:00:00', NULL, 107, '2014-03-13 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403524111572201&startDT=1999-09-10T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__201_29_204acb__201_20LUS33_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__201_29_204acb__201_20LUS33_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403527111572401 ', ' (C- 3- 1) 4bcb- 1 ', 40.5907810900000001, -111.957436799999996, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1981-04-23 00:00:00', '2014-02-06 00:00:00', NULL, 15, '2014-02-06 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403527111572401&startDT=1981-04-23T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__201_29_204bcb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__201_29_204bcb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403532111514001 ', ' (D- 3 - 1) 5abc- 1 ', 40.5924638899999977, -111.861236000000005, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-05-09 00:00:00', '2005-09-28 00:00:00', NULL, 18, '2005-09-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403532111514001&startDT=2000-05-09T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__203_20__201_29_205abc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__203_20__201_29_205abc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403533111570701 ', ' (C- 3- 1) 4aac- 1 ', 40.5924477599999989, -111.952714499999999, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1993-12-09 00:00:00', '1993-12-09 00:00:00', NULL, 1, '1993-12-09 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403533111570701&startDT=1993-12-09T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__203__201_29_204aac__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__203__201_29_204aac__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403544111584801 ', ' (C- 2- 1)32ccd- 1 ', 40.5956055600000028, -111.979966700000006, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-10-21 00:00:00', '2013-10-29 00:00:00', NULL, 32, '2013-10-29 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403544111584801&startDT=1999-10-21T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_2932ccd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_2932ccd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403547111514901 ', ' (D- 2- 1)32cdd- 3 ', 40.5963916700000027, -111.863861, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-05-09 00:00:00', '2001-07-20 00:00:00', NULL, 13, '2001-07-20 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403547111514901&startDT=2000-05-09T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2932cdd__203_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2932cdd__203_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403547111514902 ', ' (D- 2- 1)32cdd- 2 ', 40.5963971999999984, -111.863616699999994, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-05-09 00:00:00', '2001-07-20 00:00:00', NULL, 12, '2001-07-20 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403547111514902&startDT=2000-05-09T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2932cdd__202_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2932cdd__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403609111573901 ', ' (C- 2- 1)33bdc- 1 ', 40.602511100000001, -111.960836, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-10 00:00:00', '2013-10-29 00:00:00', NULL, 29, '2013-10-29 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403609111573901&startDT=1999-09-10T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_2933bdc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_2933bdc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403627111495701 ', ' (D- 2- 1)34bbc- 1 LUS26D ', 40.6075360999999972, -111.832735999999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-08-10 00:00:00', '2014-01-28 00:00:00', NULL, 112, '2014-01-28 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403627111495701&startDT=1999-08-10T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2934bbc__201_20LUS26D_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2934bbc__201_20LUS26D_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403627111495702 ', ' (D- 2- 1)34bbc- 2 ', 40.6075360999999972, -111.832735999999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-15 00:00:00', '2012-09-05 00:00:00', NULL, 92, '2012-09-05 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403627111495702&startDT=1999-09-15T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2934bbc__202_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2934bbc__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403628111514301 ', ' (D- 2- 1)32abb- 1 LUS27 ', 40.6077444399999976, -111.861933300000004, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-07 00:00:00', '2014-01-28 00:00:00', NULL, 110, '2014-01-28 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403628111514301&startDT=1999-09-07T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2932abb__201_20LUS27_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2932abb__201_20LUS27_20_72019_6_72019', -999999.0);
-- USGS Groundwater depth-to-water-level (parameterCd 72019) series for the "DataSeries" table.
-- FIX: each GetDataURL previously read "...T00:0¶meterCd=72019" — the "&para" prefix of the
-- "&parameterCd" query parameter had been mojibake-decoded into the pilcrow character "¶",
-- producing an invalid USGS NWIS request. Restored to "&parameterCd=72019".
-- NOTE(review): the doubled "%%" in GetDataInflux and the truncated "T00:0" startDT are kept
-- byte-for-byte — presumably the "%%" escapes a %-formatting layer downstream; confirm before normalizing.
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403637112005201 ', ' (C- 2- 2)25cdd- 1 ', 40.6102249799999981, -112.0152164, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1977-01-01 00:00:00', '1977-01-01 00:00:00', NULL, 1, '1977-01-01 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403637112005201&startDT=1977-01-01T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__202_2925cdd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__202_2925cdd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403638111505101 ', ' (D- 2- 1)28ccd- 1 ', 40.6104388899999975, -111.847425000000001, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-07 00:00:00', '2013-10-31 00:00:00', NULL, 32, '2013-10-31 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403638111505101&startDT=1999-09-07T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2928ccd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2928ccd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403648111501801 ', ' (D- 2- 1)28dbd- 1 ', 40.6134805599999993, -111.838477800000007, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-05-09 00:00:00', '2001-08-23 00:00:00', NULL, 18, '2001-08-23 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403648111501801&startDT=2000-05-09T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2928dbd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2928dbd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403648111504401 ', ' (D- 2- 1)28cac- 1 ', 40.6134582999999978, -111.845825000000005, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-05-09 00:00:00', '2001-08-23 00:00:00', NULL, 17, '2001-08-23 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403648111504401&startDT=2000-05-09T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2928cac__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2928cac__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403652111570201 ', ' (C- 2- 1)28dac- 1 ', 40.6145027800000022, -111.950536, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-01 00:00:00', '2013-10-29 00:00:00', NULL, 33, '2013-10-29 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403652111570201&startDT=1999-09-01T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_2928dac__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_2928dac__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403653111502501 ', ' (D- 2- 1)28dbc- 1 ', 40.6149166699999995, -111.840361000000001, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-05-09 00:00:00', '2001-08-23 00:00:00', NULL, 19, '2001-08-23 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403653111502501&startDT=2000-05-09T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2928dbc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2928dbc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403653111504301 ', ' (D- 2- 1)28cac- 2 ', 40.6147500000000008, -111.845511000000002, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-05-09 00:00:00', '2001-08-23 00:00:00', NULL, 17, '2001-08-23 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403653111504301&startDT=2000-05-09T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2928cac__202_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2928cac__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403656111503401 ', ' (D- 2- 1)28caa- 1 ', 40.6156750000000031, -111.842991699999999, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-05-09 00:00:00', '2001-08-23 00:00:00', NULL, 17, '2001-08-23 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403656111503401&startDT=2000-05-09T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2928caa__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2928caa__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403821112012400 ', ' (C- 2- 2)13cda- 1 ', 40.6414444400000008, -112.015638899999999, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1973-04-01 00:00:00', '2014-02-10 00:00:00', NULL, 14, '2014-02-10 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403821112012400&startDT=1973-04-01T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__202_2913cda__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__202_2913cda__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403656111503402 ', ' (D- 2- 1)28caa- 2 ', 40.6156777799999986, -111.843011000000004, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-05-09 00:00:00', '2001-08-23 00:00:00', NULL, 17, '2001-08-23 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403656111503402&startDT=2000-05-09T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2928caa__202_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2928caa__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403657111504201 ', ' (D- 2- 1)28caa- 3 ', 40.6158610999999965, -111.844999999999999, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-05-09 00:00:00', '2001-07-19 00:00:00', NULL, 13, '2001-07-19 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403657111504201&startDT=2000-05-09T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2928caa__203_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2928caa__203_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403828111590401 ', ' (C- 2- 1)17ccb- 1 ', 40.6410332999999966, -111.984327800000003, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-08-30 00:00:00', '2013-10-28 00:00:00', NULL, 32, '2013-10-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403828111590401&startDT=1999-08-30T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_2917ccb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_2917ccb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403833111532801 ', ' (C- 2- 1)13dad- 1 ', 40.6424474000000018, -111.892990900000001, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1981-10-27 00:00:00', '2014-02-05 00:00:00', NULL, 142, '2014-02-05 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403833111532801&startDT=1981-10-27T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_2913dad__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_2913dad__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403833111532802 ', ' (C- 2- 1)13dad- 2 ', 40.6424474000000018, -111.891879700000004, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1975-07-30 00:00:00', '2014-02-05 00:00:00', NULL, 133, '2014-02-05 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403833111532802&startDT=1975-07-30T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_2913dad__202_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_2913dad__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403907112073901 ', ' (C- 2- 3)13aba- 1 ', 40.6518900000000016, -112.128275700000003, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-07-18 00:00:00', '2003-12-19 00:00:00', NULL, 11, '2003-12-19 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403907112073901&startDT=2000-07-18T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__203_2913aba__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__203_2913aba__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403945111565401 ', ' (C- 2- 1)10bcb- 1 ', 40.6626027800000003, -111.948197199999996, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-08-25 00:00:00', '2013-10-28 00:00:00', NULL, 32, '2013-10-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403945111565401&startDT=1999-08-25T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_2910bcb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_2910bcb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403949112043301 ', ' (C- 2- 2) 9bdb- 1 ', 40.6635570999999985, -112.076607600000003, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1966-01-12 00:00:00', '2014-02-07 00:00:00', NULL, 573, '2014-02-07 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403949112043301&startDT=1966-01-12T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__202_29_209bdb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__202_29_209bdb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404007111585801 ', ' (C- 2- 1) 5ccd- 1 LUS5 ', 40.6685527799999988, -111.982813899999996, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-08-25 00:00:00', '2014-04-04 00:00:00', NULL, 114, '2014-04-04 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404007111585801&startDT=1999-08-25T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_29_205ccd__201_20LUS5_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_29_205ccd__201_20LUS5_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404012111572101 ', ' (C- 2- 1) 4dcb- 1 ', 40.6699694400000027, -111.955922200000003, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-08-19 00:00:00', '2013-10-28 00:00:00', NULL, 32, '2013-10-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404012111572101&startDT=1999-08-19T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_29_204dcb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_29_204dcb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403915111565501 ', ' (C- 2- 1) 9ddd- 1 ', 40.6541805600000004, -111.9485806, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-10-13 00:00:00', '2013-10-28 00:00:00', NULL, 31, '2013-10-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403915111565501&startDT=1999-10-13T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_29_209ddd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_29_209ddd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403916111575901 ', ' (C- 2- 1) 9ccc- 1 ', 40.6543914399999977, -111.967159899999999, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1966-04-05 00:00:00', '2014-04-08 00:00:00', NULL, 2807, '2014-04-08 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403916111575901&startDT=1966-04-05T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_29_209ccc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_29_209ccc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404045111594201 ', ' (C- 2- 1) 6abc- 4 ', 40.6791277800000017, -111.995911000000007, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1947-04-13 00:00:00', '2011-09-22 00:00:00', NULL, 27, '2011-09-22 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404045111594201&startDT=1947-04-13T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_29_206abc__204_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_29_206abc__204_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404051111513102 ', ' (D- 2- 1) 5aba- 2 ', 40.6807804000000033, -111.859379000000004, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1964-06-02 00:00:00', '2014-02-03 00:00:00', NULL, 151, '2014-02-03 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404051111513102&startDT=1964-06-02T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_29_205aba__202_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_29_205aba__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404055111485001 ', ' (D- 2- 1) 2bbb- 3 ', 40.6818916900000005, -111.814655500000001, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1981-10-02 00:00:00', '2014-02-03 00:00:00', NULL, 54, '2014-02-03 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404055111485001&startDT=1981-10-02T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_29_202bbb__203_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_29_202bbb__203_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404104111582101 ', ' (C- 1- 1)32dcd- 1 ', 40.6844055600000019, -111.972566700000002, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-08-23 00:00:00', '2013-10-28 00:00:00', NULL, 31, '2013-10-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404104111582101&startDT=1999-08-23T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__201__201_2932dcd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__201__201_2932dcd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403940112051401 ', ' (C- 2- 2) 8acd- 1 ', 40.6610569999999996, -112.087996799999999, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1981-09-04 00:00:00', '2014-02-07 00:00:00', NULL, 45, '2014-02-07 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403940112051401&startDT=1981-09-04T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__202_29_208acd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__202_29_208acd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403943111575701 ', ' (C- 2- 1) 9bcc- 1 ', 40.6620194400000017, -111.965941700000002, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-08-30 00:00:00', '2013-10-28 00:00:00', NULL, 31, '2013-10-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403943111575701&startDT=1999-08-30T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_29_209bcc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_29_209bcc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404253111530901 ', ' (D- 1- 1)19cdb-17 ', 40.7146687999999983, -111.886602300000007, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1956-05-29 00:00:00', '2014-02-05 00:00:00', NULL, 185, '2014-02-05 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404253111530901&startDT=1956-05-29T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__201__201_2919cdb_17_20_72019_6_72019%%22', NULL, 'usgs__20_28D__201__201_2919cdb_17_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404356111503901 ', ' (D- 1- 1)16caa- 1 ', 40.732446539999998, -111.844934499999994, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1934-09-07 00:00:00', '2014-02-04 00:00:00', NULL, 3453, '2014-02-04 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404356111503901&startDT=1934-09-07T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__201__201_2916caa__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__201__201_2916caa__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404438111494001 ', ' (D- 1- 1)10cac- 1 ', 40.7438353599999985, -111.828545199999994, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1934-10-01 00:00:00', '2014-02-04 00:00:00', NULL, 690, '2014-02-04 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404438111494001&startDT=1934-10-01T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__201__201_2910cac__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__201__201_2910cac__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404506111523301 ', ' (D- 1- 1) 7abd- 6 ', 40.7518555600000028, -111.877010999999996, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1931-10-02 00:00:00', '2014-02-05 00:00:00', NULL, 389, '2014-02-05 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404506111523301&startDT=1931-10-02T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__201__201_29_207abd__206_20_72019_6_72019%%22', NULL, 'usgs__20_28D__201__201_29_207abd__206_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404022111580801 ', ' (C- 2- 1) 5dac- 2 ', 40.6727082999999965, -111.968811000000002, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-01 00:00:00', '2013-10-28 00:00:00', NULL, 32, '2013-10-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404022111580801&startDT=1999-09-01T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_29_205dac__202_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_29_205dac__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404044111572701 ', ' (C- 2- 1) 4abc- 1 ', 40.6789971999999977, -111.957624999999993, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-08-23 00:00:00', '2013-10-28 00:00:00', NULL, 31, '2013-10-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404044111572701&startDT=1999-08-23T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_29_204abc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_29_204abc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404704112060401 ', ' (B- 1- 2)29ccc- 1 ', 40.784389179999998, -112.101887399999995, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1998-02-18 00:00:00', '2014-02-04 00:00:00', NULL, 18, '2014-02-04 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404704112060401&startDT=1998-02-18T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28B__201__202_2929ccc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28B__201__202_2929ccc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404720111562701 ', ' (B- 1- 1)27cac- 1 ', 40.7888345400000034, -111.941604799999993, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-02-09 00:00:00', '2014-02-04 00:00:00', NULL, 14, '2014-02-04 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404720111562701&startDT=2000-02-09T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28B__201__201_2927cac__201_20_72019_6_72019%%22', NULL, 'usgs__20_28B__201__201_2927cac__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404826112062201 ', ' (B- 1- 2)19aca- 1 ', 40.8071666800000017, -112.106887799999996, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1964-09-01 00:00:00', '2014-02-05 00:00:00', NULL, 58, '2014-02-05 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404826112062201&startDT=1964-09-01T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28B__201__202_2919aca__201_20_72019_6_72019%%22', NULL, 'usgs__20_28B__201__202_2919aca__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '405135111531501 ', ' (A- 2- 1)31cca- 1 ', 40.8596675000000005, -111.888270899999995, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1946-10-14 00:00:00', '2014-03-06 00:00:00', NULL, 144, '2014-03-06 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=405135111531501&startDT=1946-10-14T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A__202__201_2931cca__201_20_72019_6_72019%%22', NULL, 'usgs__20_28A__202__201_2931cca__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404152111525101 ', ' (D- 1- 1)30cda-10 ', 40.6977246000000008, -111.881602000000001, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1956-10-26 00:00:00', '2014-02-03 00:00:00', NULL, 125, '2014-02-03 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404152111525101&startDT=1956-10-26T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__201__201_2930cda_10_20_72019_6_72019%%22', NULL, 'usgs__20_28D__201__201_2930cda_10_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404202112064701 ', ' (C- 1- 2)30cac- 1 ', 40.7005009000000015, -112.113831300000001, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2000-07-17 00:00:00', '2006-07-03 00:00:00', NULL, 20, '2006-07-03 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404202112064701&startDT=2000-07-17T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__201__202_2930cac__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__201__202_2930cac__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '413609111495701 ', ' (A-10- 1)16dad- 1 ', 41.6024337699999975, -111.833275499999999, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1968-09-26 00:00:00', '2014-04-16 00:00:00', NULL, 166, '2014-04-16 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=413609111495701&startDT=1968-09-26T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A_10__201_2916dad__201_20_72019_6_72019%%22', NULL, 'usgs__20_28A_10__201_2916dad__201_20_72019_6_72019', -999999.0);
-- USGS Groundwater depth-to-water-level (parameterCd 72019) series rows for the "DataSeries" table.
-- FIX(review): each GetDataURL originally read "...T00:0¶meterCd=72019". The "¶" is an
-- HTML-entity-decoding artifact: the "&para" prefix of the literal query parameter
-- "&parameterCd" was collapsed into the pilcrow character, breaking the USGS waterservices URL.
-- Restored to "&parameterCd=72019" in every row.
-- NOTE(review): the startDT time component "T00:0" looks truncated (a full timestamp would be
-- "T00:00:00"), but the intended value cannot be confirmed from this dump — preserved as-is.
-- NOTE(review): the doubled "%%" escapes in GetDataInflux appear uniformly across the dump and
-- are presumably deliberate (percent-escaping for a %-formatting consumer) — left unchanged.
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '413805111492201 ', ' (A-10- 1) 3bdd- 1 ', 41.6345719899999978, -111.823636699999994, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1996-05-01 00:00:00', '2014-04-07 00:00:00', NULL, 86, '2014-04-07 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=413805111492201&startDT=1996-05-01T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A_10__201_29_203bdd__201_20_72019_6_72019%%22', NULL, 'usgs__20_28A_10__201_29_203bdd__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '413840111552601 ', ' (B-11- 1)35cca- 1 ', 41.6443764699999974, -111.924667299999996, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1966-05-16 00:00:00', '2014-04-16 00:00:00', NULL, 124, '2014-04-16 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=413840111552601&startDT=1966-05-16T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28B_11__201_2935cca__201_20_72019_6_72019%%22', NULL, 'usgs__20_28B_11__201_2935cca__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '413924111493501 ', ' (A-11- 1)27cdc- 1 ', 41.6565992000000023, -111.827164800000006, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1967-02-28 00:00:00', '2014-04-16 00:00:00', NULL, 162, '2014-04-16 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=413924111493501&startDT=1967-02-28T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A_11__201_2927cdc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28A_11__201_2927cdc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404531111510101 ', ' (D- 1- 1) 4cbc- 1 ', 40.7585573599999975, -111.851045999999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1934-10-15 00:00:00', '2014-02-04 00:00:00', NULL, 506, '2014-02-04 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404531111510101&startDT=1934-10-15T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__201__201_29_204cbc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__201__201_29_204cbc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '404627111532601 ', ' (A- 1- 1)31cac- 1 ', 40.774116669999998, -111.885877800000003, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1943-07-27 00:00:00', '2014-02-04 00:00:00', NULL, 173, '2014-02-04 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=404627111532601&startDT=1943-07-27T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A__201__201_2931cac__201_20_72019_6_72019%%22', NULL, 'usgs__20_28A__201__201_2931cac__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414026111500601 ', ' (A-11- 1)21ddb- 1 ', 41.6738209600000005, -111.835776300000006, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1982-09-08 00:00:00', '2010-03-02 00:00:00', NULL, 93, '2010-03-02 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414026111500601&startDT=1982-09-08T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A_11__201_2921ddb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28A_11__201_2921ddb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414038111513701 ', ' (A-11- 1)20dbb- 1 ', 41.6771261999999965, -111.861138100000005, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1978-03-16 00:00:00', '2014-04-16 00:00:00', NULL, 75, '2014-04-16 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414038111513701&startDT=1978-03-16T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A_11__201_2920dbb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28A_11__201_2920dbb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414134111544701 ', ' (B-11- 1)14adc- 2 ', 41.6927086000000031, -111.913834199999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1968-01-19 00:00:00', '2014-04-16 00:00:00', NULL, 85, '2014-04-16 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414134111544701&startDT=1968-01-19T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28B_11__201_2914adc__202_20_72019_6_72019%%22', NULL, 'usgs__20_28B_11__201_2914adc__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414141111493601 ', ' (A-11- 1)15bdb- 1 S29 ', 41.6947038899999995, -111.827517999999998, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1991-03-27 00:00:00', '2012-08-22 00:00:00', NULL, 30, '2012-08-22 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414141111493601&startDT=1991-03-27T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A_11__201_2915bdb__201_20S29_20_72019_6_72019%%22', NULL, 'usgs__20_28A_11__201_2915bdb__201_20S29_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414209111574001 ', ' (B-11- 1) 9cdb- 1 ', 41.7024301000000008, -111.961891199999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1955-06-12 00:00:00', '2014-04-16 00:00:00', NULL, 182, '2014-04-16 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414209111574001&startDT=1955-06-12T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28B_11__201_29_209cdb__201_20_72019_6_72019%%22', NULL, 'usgs__20_28B_11__201_29_209cdb__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414216111511001 ', ' (A-11- 1) 8dda- 3 ', 41.7043755999999988, -111.853554900000006, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1935-11-01 00:00:00', '1969-07-01 00:00:00', NULL, 151, '1969-07-01 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414216111511001&startDT=1935-11-01T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A_11__201_29_208dda__203_20_72019_6_72019%%22', NULL, 'usgs__20_28A_11__201_29_208dda__203_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414650111560201 ', ' (B-12- 1)15adc- 1 ', 41.7804841999999965, -111.934669299999996, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1990-04-19 00:00:00', '2005-09-29 00:00:00', NULL, 39, '2005-09-29 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414650111560201&startDT=1990-04-19T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28B_12__201_2915adc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28B_12__201_2915adc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414316111500101 ', ' (A-11- 1) 4daa- 2 ', 41.7210420899999974, -111.834387899999996, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1962-12-22 00:00:00', '2007-09-27 00:00:00', NULL, 228, '2007-09-27 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414316111500101&startDT=1962-12-22T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A_11__201_29_204daa__202_20_72019_6_72019%%22', NULL, 'usgs__20_28A_11__201_29_204daa__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414409111523502 ', ' (A-12- 1)31dab- 2 ', 41.7357634800000028, -111.877167, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1977-03-09 00:00:00', '2013-06-05 00:00:00', NULL, 114, '2013-06-05 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414409111523502&startDT=1977-03-09T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A_12__201_2931dab__202_20_72019_6_72019%%22', NULL, 'usgs__20_28A_12__201_2931dab__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414444111553101 ', ' (B-12- 1)26cca- 1 ', 41.7454848900000002, -111.926057299999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1944-07-22 00:00:00', '2005-12-29 00:00:00', NULL, 67, '2005-12-29 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414444111553101&startDT=1944-07-22T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28B_12__201_2926cca__201_20_72019_6_72019%%22', NULL, 'usgs__20_28B_12__201_2926cca__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414501111520001 ', ' (A-12- 1)29cab- 1 ', 41.7502076800000026, -111.867444599999999, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1940-08-25 00:00:00', '2012-04-18 00:00:00', NULL, 2946, '2012-04-18 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414501111520001&startDT=1940-08-25T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A_12__201_2929cab__201_20_72019_6_72019%%22', NULL, 'usgs__20_28A_12__201_2929cab__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '414642111511401 ', ' (A-12- 1)17daa- 1 ', 41.7782629499999985, -111.854667000000006, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1967-08-29 00:00:00', '2014-04-16 00:00:00', NULL, 129, '2014-04-16 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=414642111511401&startDT=1967-08-29T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28A_12__201_2917daa__201_20_72019_6_72019%%22', NULL, 'usgs__20_28A_12__201_2917daa__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403659111580501 ', ' (C- 2- 1)29daa- 1 ', 40.6163888900000032, -111.968163899999993, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-24 00:00:00', '2013-11-01 00:00:00', NULL, 30, '2013-11-01 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403659111580501&startDT=1999-09-24T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_2929daa__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_2929daa__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403713111501901 ', ' (D- 2- 1)28aca- 1 ', 40.6202444400000005, -111.838635999999994, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-29 00:00:00', '2013-10-29 00:00:00', NULL, 29, '2013-10-29 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403713111501901&startDT=1999-09-29T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2928aca__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2928aca__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403729111581701 ', ' (C- 2- 1)20ddc- 1 ', 40.6246721999999991, -111.971372200000005, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-10-05 00:00:00', '2013-10-28 00:00:00', NULL, 30, '2013-10-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403729111581701&startDT=1999-10-05T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_2920ddc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_2920ddc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403914111560101 ', ' (C- 2- 1)10ddc- 1 ', 40.6537860999999978, -111.9337333, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-08-26 00:00:00', '2013-10-28 00:00:00', NULL, 30, '2013-10-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403914111560101&startDT=1999-08-26T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_2910ddc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_2910ddc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403914111580201 ', ' (C- 2- 1) 9ccc- 3 ', 40.653897200000003, -111.967141699999999, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-10-06 00:00:00', '2013-10-28 00:00:00', NULL, 31, '2013-10-28 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403914111580201&startDT=1999-10-06T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_29_209ccc__203_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_29_209ccc__203_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403730111563201 ', ' (C- 2- 1)22cdc- 1 ', 40.624969440000001, -111.942158300000003, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-01 00:00:00', '2013-10-29 00:00:00', NULL, 31, '2013-10-29 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403730111563201&startDT=1999-09-01T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28C__202__201_2922cdc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28C__202__201_2922cdc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403736111521401 ', ' (D- 2- 1)20ccb- 2 ', 40.6265666700000025, -111.870444399999997, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1999-09-07 00:00:00', '2013-10-29 00:00:00', NULL, 29, '2013-10-29 00:00:00', 0, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403736111521401&startDT=1999-09-07T00:0&parameterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2920ccb__202_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2920ccb__202_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403742111503201 ', ' (D- 2- 1)21dbc- 1 ', 40.6285833000000025, -111.843166699999998, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1961-09-20 00:00:00', '2014-02-05 00:00:00', NULL, 154, '2014-02-05 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403742111503201&startDT=1961-09-20T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2921dbc__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2921dbc__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (6, 'USGS Groundwater', '403803111505301 ', ' (D- 2- 1)21bca- 1 ', 40.6340694399999975, -111.8489, 'Utah ', NULL, 'Well', '72019', 'Depth to water level, feet below land surface', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Sporadic', 'Physical ', 0, NULL, NULL, NULL, 'USGS-A', 'Approved', 'Data are approved by the USGS.', 'USGS', 'Data retrieved from USGS NWIS', '1979-01-29 00:00:00', '2014-02-03 00:00:00', NULL, 244, '2014-02-03 00:00:00', 1, 'http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=403803111505301&startDT=1979-01-29T00:0¶meterCd=72019', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs__20_28D__202__201_2921bca__201_20_72019_6_72019%%22', NULL, 'usgs__20_28D__202__201_2921bca__201_20_72019_6_72019', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10172200 ', 'RED BUTTE CREEK AT FORT DOUGLAS, NEAR SLC, UT ', 40.7799462700000035, -111.806044999999997, 'Utah ', NULL, 'Stream', '10', 'Temperature, water, degrees Celsius', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'deg C', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2012-02-03 00:00:00', '2014-05-14 00:00:00', -7, 303315, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10172200&startDT=2012-02-03T00:0¶meterCd=00010', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_10_4_00010%%22', NULL, 'usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_10_4_00010', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10113500 ', 'BLACKSMITH FORK AB UP and L CO.''S DAM NR HYRUM, UT ', 41.6235452000000024, -111.738828600000005, 'Utah ', NULL, 'Stream', '45', 'Precipitation, total, inches', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'in', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10113500&startDT=2007-10-01T00:0¶meterCd=00045', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_BLACKSMITH_20FORK_20AB_20UP_20and_20L_20CO__27S_20DAM_20NR_20HYRUM_2C_20UT_20_45_4_00045%%22', NULL, 'usgs_BLACKSMITH_20FORK_20AB_20UP_20and_20L_20CO__27S_20DAM_20NR_20HYRUM_2C_20UT_20_45_4_00045', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10105900 ', 'LITTLE BEAR RIVER AT PARADISE, UT ', 41.5754897999999997, -111.855220299999999, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10105900&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20BEAR_20RIVER_20AT_20PARADISE_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_LITTLE_20BEAR_20RIVER_20AT_20PARADISE_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10108400 ', 'CACHE HIGHLINE CANAL NEAR LOGAN, UTAH ', 41.742986860000002, -111.761886000000004, 'Utah ', NULL, 'Canal', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10108400&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_CACHE_20HIGHLINE_20CANAL_20NEAR_20LOGAN_2C_20UTAH_20_60_4_00060%%22', NULL, 'usgs_CACHE_20HIGHLINE_20CANAL_20NEAR_20LOGAN_2C_20UTAH_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10109000 ', 'LOGAN RIVER ABOVE STATE DAM, NEAR LOGAN, UT ', 41.7432643900000002, -111.782719999999998, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10109000&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LOGAN_20RIVER_20ABOVE_20STATE_20DAM_2C_20NEAR_20LOGAN_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_LOGAN_20RIVER_20ABOVE_20STATE_20DAM_2C_20NEAR_20LOGAN_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10113500 ', 'BLACKSMITH FORK AB UP and L CO.''S DAM NR HYRUM, UT ', 41.6235452000000024, -111.738828600000005, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10113500&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_BLACKSMITH_20FORK_20AB_20UP_20and_20L_20CO__27S_20DAM_20NR_20HYRUM_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_BLACKSMITH_20FORK_20AB_20UP_20and_20L_20CO__27S_20DAM_20NR_20HYRUM_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10154200 ', 'PROVO RIVER NEAR WOODLAND, UT ', 40.5577278000000021, -111.1687838, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10154200&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIVER_20NEAR_20WOODLAND_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_PROVO_20RIVER_20NEAR_20WOODLAND_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10155000 ', 'PROVO RIVER NEAR HAILSTONE, UT ', 40.6007855499999977, -111.331571999999994, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10155000&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIVER_20NEAR_20HAILSTONE_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_PROVO_20RIVER_20NEAR_20HAILSTONE_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10168000 ', 'LITTLE COTTONWOOD CREEK @ JORDAN RIVER NR SLC ', 40.6638360000000034, -111.901880199999994, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10168000&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_60_4_00060%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10170500 ', 'SURPLUS CANAL @ SALT LAKE CITY, UT ', 40.7268907600000034, -111.926603700000001, 'Utah ', NULL, 'Canal', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10170500&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_SURPLUS_20CANAL_20_40_20SALT_20LAKE_20CITY_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_SURPLUS_20CANAL_20_40_20SALT_20LAKE_20CITY_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10171000 ', 'JORDAN RIVER @ 1700 SOUTH @ SALT LAKE CITY, UT ', 40.7335573699999998, -111.923270299999999, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10171000&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_JORDAN_20RIVER_20_40_201700_20SOUTH_20_40_20SALT_20LAKE_20CITY_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_JORDAN_20RIVER_20_40_201700_20SOUTH_20_40_20SALT_20LAKE_20CITY_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10172200 ', 'RED BUTTE CREEK AT FORT DOUGLAS, NEAR SLC, UT ', 40.7799462700000035, -111.806044999999997, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10172200&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10172630 ', 'GOGGIN DRAIN NEAR MAGNA UTAH ', 40.8166110700000004, -112.100776699999997, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10172630&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_GOGGIN_20DRAIN_20NEAR_20MAGNA_20UTAH_20_60_4_00060%%22', NULL, 'usgs_GOGGIN_20DRAIN_20NEAR_20MAGNA_20UTAH_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10155200 ', 'PROVO RIV AT RIV ROAD BRIDGE NR HEBER CITY, UT ', 40.5543980500000032, -111.4332426, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10155200&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIV_20AT_20RIV_20ROAD_20BRIDGE_20NR_20HEBER_20CITY_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_PROVO_20RIV_20AT_20RIV_20ROAD_20BRIDGE_20NR_20HEBER_20CITY_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10155500 ', 'PROVO RIVER NEAR CHARLESTON, UT ', 40.4841221000000004, -111.4635198, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10155500&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIVER_20NEAR_20CHARLESTON_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_PROVO_20RIVER_20NEAR_20CHARLESTON_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10156000 ', 'SNAKE CREEK NEAR CHARLESTON, UT ', 40.4852332000000033, -111.467131199999997, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10156000&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_SNAKE_20CREEK_20NEAR_20CHARLESTON_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_SNAKE_20CREEK_20NEAR_20CHARLESTON_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10157500 ', 'DANIELS CREEK AT CHARLESTON, UT ', 40.4607890899999987, -111.472686999999993, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10157500&startDT=2007-10-01T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_DANIELS_20CREEK_20AT_20CHARLESTON_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_DANIELS_20CREEK_20AT_20CHARLESTON_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10163000 ', 'PROVO RIVER AT PROVO, UT ', 40.2377317999999988, -111.699370000000002, 'Utah ', NULL, 'Stream', '60', 'Discharge, cubic feet per second', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft3/s', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-11-14 00:00:00', '2014-05-14 00:00:00', -7, 866145, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10163000&startDT=2007-11-14T00:0¶meterCd=00060', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIVER_20AT_20PROVO_2C_20UT_20_60_4_00060%%22', NULL, 'usgs_PROVO_20RIVER_20AT_20PROVO_2C_20UT_20_60_4_00060', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10105900 ', 'LITTLE BEAR RIVER AT PARADISE, UT ', 41.5754897999999997, -111.855220299999999, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10105900&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20BEAR_20RIVER_20AT_20PARADISE_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_LITTLE_20BEAR_20RIVER_20AT_20PARADISE_2C_20UT_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10108400 ', 'CACHE HIGHLINE CANAL NEAR LOGAN, UTAH ', 41.742986860000002, -111.761886000000004, 'Utah ', NULL, 'Canal', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10108400&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_CACHE_20HIGHLINE_20CANAL_20NEAR_20LOGAN_2C_20UTAH_20_65_4_00065%%22', NULL, 'usgs_CACHE_20HIGHLINE_20CANAL_20NEAR_20LOGAN_2C_20UTAH_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10109000 ', 'LOGAN RIVER ABOVE STATE DAM, NEAR LOGAN, UT ', 41.7432643900000002, -111.782719999999998, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10109000&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LOGAN_20RIVER_20ABOVE_20STATE_20DAM_2C_20NEAR_20LOGAN_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_LOGAN_20RIVER_20ABOVE_20STATE_20DAM_2C_20NEAR_20LOGAN_2C_20UT_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10113500 ', 'BLACKSMITH FORK AB UP and L CO.''S DAM NR HYRUM, UT ', 41.6235452000000024, -111.738828600000005, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10113500&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_BLACKSMITH_20FORK_20AB_20UP_20and_20L_20CO__27S_20DAM_20NR_20HYRUM_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_BLACKSMITH_20FORK_20AB_20UP_20and_20L_20CO__27S_20DAM_20NR_20HYRUM_2C_20UT_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10154200 ', 'PROVO RIVER NEAR WOODLAND, UT ', 40.5577278000000021, -111.1687838, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10154200&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIVER_20NEAR_20WOODLAND_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_PROVO_20RIVER_20NEAR_20WOODLAND_2C_20UT_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10155000 ', 'PROVO RIVER NEAR HAILSTONE, UT ', 40.6007855499999977, -111.331571999999994, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10155000&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIVER_20NEAR_20HAILSTONE_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_PROVO_20RIVER_20NEAR_20HAILSTONE_2C_20UT_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10168000 ', 'LITTLE COTTONWOOD CREEK @ JORDAN RIVER NR SLC ', 40.6638360000000034, -111.901880199999994, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2008-10-01 00:00:00', '2014-05-14 00:00:00', -7, 748615, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10168000&startDT=2008-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_65_4_00065%%22', NULL, 'usgs_LITTLE_20COTTONWOOD_20CREEK_20_40_20JORDAN_20RIVER_20NR_20SLC_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10170500 ', 'SURPLUS CANAL @ SALT LAKE CITY, UT ', 40.7268907600000034, -111.926603700000001, 'Utah ', NULL, 'Canal', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10170500&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_SURPLUS_20CANAL_20_40_20SALT_20LAKE_20CITY_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_SURPLUS_20CANAL_20_40_20SALT_20LAKE_20CITY_2C_20UT_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10171000 ', 'JORDAN RIVER @ 1700 SOUTH @ SALT LAKE CITY, UT ', 40.7335573699999998, -111.923270299999999, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10171000&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_JORDAN_20RIVER_20_40_201700_20SOUTH_20_40_20SALT_20LAKE_20CITY_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_JORDAN_20RIVER_20_40_201700_20SOUTH_20_40_20SALT_20LAKE_20CITY_2C_20UT_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10172200 ', 'RED BUTTE CREEK AT FORT DOUGLAS, NEAR SLC, UT ', 40.7799462700000035, -111.806044999999997, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10172200&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_RED_20BUTTE_20CREEK_20AT_20FORT_20DOUGLAS_2C_20NEAR_20SLC_2C_20UT_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10172630 ', 'GOGGIN DRAIN NEAR MAGNA UTAH ', 40.8166110700000004, -112.100776699999997, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10172630&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_GOGGIN_20DRAIN_20NEAR_20MAGNA_20UTAH_20_65_4_00065%%22', NULL, 'usgs_GOGGIN_20DRAIN_20NEAR_20MAGNA_20UTAH_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10155200 ', 'PROVO RIV AT RIV ROAD BRIDGE NR HEBER CITY, UT ', 40.5543980500000032, -111.4332426, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10155200&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIV_20AT_20RIV_20ROAD_20BRIDGE_20NR_20HEBER_20CITY_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_PROVO_20RIV_20AT_20RIV_20ROAD_20BRIDGE_20NR_20HEBER_20CITY_2C_20UT_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10155500 ', 'PROVO RIVER NEAR CHARLESTON, UT ', 40.4841221000000004, -111.4635198, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10155500&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_PROVO_20RIVER_20NEAR_20CHARLESTON_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_PROVO_20RIVER_20NEAR_20CHARLESTON_2C_20UT_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10156000 ', 'SNAKE CREEK NEAR CHARLESTON, UT ', 40.4852332000000033, -111.467131199999997, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10156000&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_SNAKE_20CREEK_20NEAR_20CHARLESTON_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_SNAKE_20CREEK_20NEAR_20CHARLESTON_2C_20UT_20_65_4_00065', -999999.0);
INSERT INTO "DataSeries" ("SourceDataServiceID", "Network", "SiteCode", "SiteName", "Latitude", "Longitude", "State", "County", "SiteType", "VariableCode", "VariableName", "VariableLevel", "MethodDescription", "VariableUnitsName", "VariableUnitsType", "VariableUnitsAbbreviation", "SampleMedium", "ValueType", "DataType", "GeneralCategory", "TimeSupport", "TimeSupportUnitsName", "TimeSupportUnitsType", "TimeSupportUnitsAbbreviation", "QualityControlLevelCode", "QualityControlLevelDefinition", "QualityControlLevelExplanation", "SourceOrganization", "SourceDescription", "BeginDateTime", "EndDateTime", "UTCOffset", "NumberObservations", "DateLastUpdated", "IsActive", "GetDataURL", "GetDataInflux", "ResultUUID", "InfluxIdentifier", "NoDataValue") VALUES (4, 'USGS Instantaneous', '10157500 ', 'DANIELS CREEK AT CHARLESTON, UT ', 40.4607890899999987, -111.472686999999993, 'Utah ', NULL, 'Stream', '65', 'Gage height, feet', 'Common ', 'Data collected using standard USGS methods.', NULL, NULL, 'ft', 'Water', 'Field Observation ', 'Continuous', 'Physical ', 15, 'minute', 'Time', 'min', 'USGS-A/P', 'Approved or Provisional', 'Data may be provisional without having been reviewed or edited or data may be approved by USGS.', 'USGS', 'Data retrieved from USGS NWIS', '2007-10-01 00:00:00', '2014-05-14 00:00:00', -7, 882205, '2014-05-14 00:00:00', 1, ' http://nwis.waterservices.usgs.gov/nwis/iv/?format=waterml,1.1&sites=10157500&startDT=2007-10-01T00:0¶meterCd=00065', 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=SELECT%%20%%2A%%20FROM%%20%%22usgs_DANIELS_20CREEK_20AT_20CHARLESTON_2C_20UT_20_65_4_00065%%22', NULL, 'usgs_DANIELS_20CREEK_20AT_20CHARLESTON_2C_20UT_20_65_4_00065', -999999.0);
"""
def fetch_usgs_waterml(fetch_url):
query_response = requests.get(fetch_url)
if '200' not in str(query_response.status_code):
print 'Response was not successful: {}'.format(query_response.status_code)
print 'URL: {}'.format(fetch_url)
print 'Message: {}'.format(query_response.text)
return None
return query_response.text
# SQL used to read every USGS-sourced series row back out of the catalog
# so its InfluxDB time series can be refreshed (see
# update_usgs_influx_timeseries below).
usgs_query = """
SELECT * from public."DataSeries"
WHERE "SourceOrganization" = 'USGS'
"""
def insert_into_catalog(connection_string, values):
    """Append the rows in *values* (a pandas DataFrame) to the "DataSeries" table."""
    engine = sqlalchemy.create_engine(connection_string)
    values.to_sql(name="DataSeries", con=engine, if_exists="append", index=False)
def purge_catalog_and_insert_usgs_sites(credentials):
    """Empty the "DataSeries" catalog and repopulate it with the USGS sites.

    Deletes every catalog row, restarts the series-id sequence, then runs
    the canned USGS insert statements in ``usgs_sites_insert_query``
    (defined elsewhere in this module).

    Parameters
    ----------
    credentials : object
        Must expose ``tsa_catalog_destination`` keyword arguments suitable
        for :func:`build_connection_string`.
    """
    source_catalog_str = build_connection_string(**credentials.tsa_catalog_destination)
    to_conn = sqlalchemy.create_engine(source_catalog_str)
    # Context manager guarantees the connection is released even when one of
    # the statements raises; the original leaked the connection on error.
    with to_conn.connect() as conn:
        conn.execute('DELETE FROM public."DataSeries"')
        conn.execute('ALTER SEQUENCE "Catalog".series_increment RESTART WITH 1')
        conn.execute(usgs_sites_insert_query)
def update_usgs_influx_timeseries(credentials):
    """Fetch fresh USGS WaterML data and append it to InfluxDB.

    For every USGS row in the catalog, downloads the WaterML document at its
    ``GetDataURL``, extracts the data points newer than the series' current
    end time in Influx, and writes them under ``InfluxIdentifier``.  URLs
    that yielded no usable data are reported at the end.

    Parameters
    ----------
    credentials : object
        Must expose ``tsa_catalog_destination`` (for the catalog DB) and
        ``influx_credentials`` (for InfluxClient).
    """
    source_catalog_str = build_connection_string(**credentials.tsa_catalog_destination)
    catalog_table = pandas.read_sql(usgs_query, source_catalog_str)
    # One client serves every series; the original rebuilt it on every
    # loop iteration for no benefit.
    influx_client = InfluxClient(**credentials.influx_credentials)
    no_timeseries = []
    for i in range(len(catalog_table)):
        # get_value is deprecated in modern pandas (use .at) -- kept here to
        # stay compatible with the pandas version this codebase pins.
        fetch_url = catalog_table.get_value(i, 'GetDataURL')
        identifier = catalog_table.get_value(i, 'InfluxIdentifier')
        usgs_waterml = fetch_usgs_waterml(fetch_url)
        if usgs_waterml is None:
            print('Waterml was none: {}'.format(fetch_url))
            no_timeseries.append(fetch_url)
            continue
        print('Parsed datapoints from waterml with length {}'.format(len(usgs_waterml)))
        # Only points newer than the current end of the Influx series are kept.
        last_entry = influx_client.GetTimeSeriesEndTime(identifier)
        timeseries = WaterMLParser.ExtractUSGSTimeSeriesDataPoints(usgs_waterml, last_entry)
        if timeseries is not None and len(timeseries) > 0:
            result = influx_client.AddDataFrameToDatabase(timeseries, identifier)
            print(result)
            # Free the parsed frame promptly -- some series are very large.
            del timeseries
        else:
            no_timeseries.append(fetch_url)
            print('Timeseries for identifier {} failed. Here\'s the URL: {}'.format(identifier, fetch_url))
        del usgs_waterml
    for fetch_url in no_timeseries:
        print('No timeseries for {}'.format(fetch_url))
| 1,223.963235
| 1,857
| 0.755171
| 38,430
| 332,918
| 6.396903
| 0.032266
| 0.01894
| 0.014205
| 0.012268
| 0.951325
| 0.949047
| 0.946468
| 0.945906
| 0.945809
| 0.945471
| 0
| 0.132801
| 0.074547
| 332,918
| 271
| 1,858
| 1,228.479705
| 0.665099
| 0
| 0
| 0.023529
| 0
| 0.760784
| 0.992857
| 0.200221
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.760784
| 0.043137
| null | null | 0.031373
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 12
|
9a37b9797832e4737c70e381e504964b5fa0be5a
| 131
|
py
|
Python
|
fisrtPyPiPackageSatishS248/__init__.py
|
satishs248/fisrtPyPiPackageSatishS248
|
cdff0b504ca355ee54406911ce5463cf2127f0e1
|
[
"MIT"
] | null | null | null |
fisrtPyPiPackageSatishS248/__init__.py
|
satishs248/fisrtPyPiPackageSatishS248
|
cdff0b504ca355ee54406911ce5463cf2127f0e1
|
[
"MIT"
] | null | null | null |
fisrtPyPiPackageSatishS248/__init__.py
|
satishs248/fisrtPyPiPackageSatishS248
|
cdff0b504ca355ee54406911ce5463cf2127f0e1
|
[
"MIT"
] | null | null | null |
from firstPyPiPackageSatishS248.BasicMath import BasicMath
from firstPyPiPackageSatishS248.StringOperations import StringOperations
| 65.5
| 72
| 0.931298
| 10
| 131
| 12.2
| 0.5
| 0.491803
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048387
| 0.053435
| 131
| 2
| 72
| 65.5
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7be7196cb08aaedf2a5f3cde655379c6bd6f56e8
| 1,015
|
py
|
Python
|
notebook/numpy_dstack.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 174
|
2018-05-30T21:14:50.000Z
|
2022-03-25T07:59:37.000Z
|
notebook/numpy_dstack.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 5
|
2019-08-10T03:22:02.000Z
|
2021-07-12T20:31:17.000Z
|
notebook/numpy_dstack.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 53
|
2018-04-27T05:26:35.000Z
|
2022-03-25T07:59:37.000Z
|
import numpy as np

# Demonstration of numpy.dstack: depth-wise stacking along a third axis,
# first with 2-D inputs, then with 1-D inputs (which are promoted to (1, n)).

a1 = np.ones((3, 4), int)
print(a1)
# 3x4 array of ones

a2 = np.full((3, 4), 2)
print(a2)
# 3x4 array of twos

stacked = np.dstack([a1, a2])
print(stacked)
# shape (3, 4, 2): every position holds the pair [1 2]

print(stacked.shape)
# (3, 4, 2)

print(stacked[:, :, 0])
# depth slice 0 recovers a1

print(stacked[:, :, 1])
# depth slice 1 recovers a2

print(np.concatenate([a1.reshape(3, 4, 1), a2.reshape(3, 4, 1)], 2))
# equivalent result built by hand: add a trailing axis, concatenate on axis 2

# With 1-D inputs, dstack first promotes each array to shape (1, n).
a1 = np.ones(3, int)
print(a1)
# [1 1 1]

a2 = np.full(3, 2)
print(a2)
# [2 2 2]

stacked = np.dstack([a1, a2])
print(stacked)
# [[[1 2] [1 2] [1 2]]]

print(stacked.shape)
# (1, 3, 2)

print(stacked[:, :, 0])
# [[1 1 1]]

print(stacked[:, :, 1])
# [[2 2 2]]
| 12.530864
| 68
| 0.396059
| 214
| 1,015
| 1.878505
| 0.088785
| 0.149254
| 0.186567
| 0.248756
| 0.818408
| 0.818408
| 0.728856
| 0.676617
| 0.676617
| 0.676617
| 0
| 0.229167
| 0.29064
| 1,015
| 80
| 69
| 12.6875
| 0.329167
| 0.414778
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.055556
| 0
| 0.055556
| 0.722222
| 0
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
d0b2561e7eebd857e4c59bdd9fd2d97eef5c1125
| 213
|
py
|
Python
|
arctic/project_template/app/forms.py
|
sanoma/django-arctic
|
18edd63e46b31ce0492b5991d036df896bd14bc6
|
[
"MIT"
] | 73
|
2016-08-15T11:37:08.000Z
|
2020-04-11T14:12:19.000Z
|
arctic/project_template/app/forms.py
|
sanoma/django-arctic
|
18edd63e46b31ce0492b5991d036df896bd14bc6
|
[
"MIT"
] | 331
|
2016-08-16T12:05:04.000Z
|
2020-04-16T18:39:46.000Z
|
arctic/project_template/app/forms.py
|
sanoma/django-arctic
|
18edd63e46b31ce0492b5991d036df896bd14bc6
|
[
"MIT"
] | 26
|
2016-08-17T12:58:30.000Z
|
2019-10-16T08:07:41.000Z
|
from django import forms
from .models import {{ camel_case_app_name }}
class {{ camel_case_app_name }}Form(forms.ModelForm):
    # Django project-template source: the {{ camel_case_app_name }} markers are
    # substituted by the startapp template renderer, so this file is not valid
    # Python until rendered.
    class Meta:
        fields = '__all__'
        model = {{ camel_case_app_name }}
| 21.3
| 53
| 0.676056
| 28
| 213
| 4.678571
| 0.571429
| 0.206107
| 0.274809
| 0.366412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.220657
| 213
| 9
| 54
| 23.666667
| 0.789157
| 0
| 0
| 0
| 0
| 0
| 0.032864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
d0c9d7136efe436d07b83a58bcca58159daeed0e
| 207
|
py
|
Python
|
src/ctc/binary/abis/__init__.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 94
|
2022-02-15T19:34:49.000Z
|
2022-03-26T19:26:22.000Z
|
src/ctc/binary/abis/__init__.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-03-03T02:58:47.000Z
|
2022-03-11T18:41:05.000Z
|
src/ctc/binary/abis/__init__.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-02-15T17:53:07.000Z
|
2022-03-17T19:14:17.000Z
|
from .abi_coding import *
from .block_coding import *
from .contract_parsing import *
from .event_coding import *
from .event_parsing import *
from .function_coding import *
from .function_parsing import *
| 23
| 31
| 0.792271
| 28
| 207
| 5.607143
| 0.321429
| 0.382166
| 0.407643
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140097
| 207
| 8
| 32
| 25.875
| 0.882022
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d0d88b1d20c1a272b7150f3e4231c9e293df7205
| 1,162
|
py
|
Python
|
tests/endpoints/test_mint_token.py
|
s0b0lev/raiden-client-python
|
4eecdda10650f081e4449449949067af6356d542
|
[
"MIT"
] | 3
|
2019-08-01T12:47:16.000Z
|
2020-07-05T15:28:53.000Z
|
tests/endpoints/test_mint_token.py
|
s0b0lev/raiden-client-python
|
4eecdda10650f081e4449449949067af6356d542
|
[
"MIT"
] | 17
|
2019-08-01T07:51:58.000Z
|
2020-05-29T09:48:37.000Z
|
tests/endpoints/test_mint_token.py
|
s0b0lev/raiden-client-python
|
4eecdda10650f081e4449449949067af6356d542
|
[
"MIT"
] | null | null | null |
from raiden_client.endpoints.mint_tokens import MintTokens
def test_mint_tokens_request() -> None:
    """Without an explicit contract_method, MintTokens defaults to "mintFor"."""
    mint_request = MintTokens(
        token_address="0x145737846791E749f96344135Ce211BE8C510a17",
        to="0xCcAbA1b954F29b3daD93A9f846f6356692154500",
        value=10,
    )
    assert mint_request.endpoint == f"/_testing/tokens/{mint_request.token_address}/mint"
    assert mint_request.method == "post"
    assert mint_request.name == "mint-tokens"
    body = mint_request.payload()
    for key in ("to", "value", "contract_method"):
        assert key in body
    assert body["contract_method"] == "mintFor"
def test_mint_tokens_method_request() -> None:
    """An explicit contract_method is carried through to the payload."""
    mint_request = MintTokens(
        token_address="0x145737846791E749f96344135Ce211BE8C510a17",
        to="0xCcAbA1b954F29b3daD93A9f846f6356692154500",
        value=10,
        contract_method="mint",
    )
    assert mint_request.endpoint == f"/_testing/tokens/{mint_request.token_address}/mint"
    assert mint_request.method == "post"
    body = mint_request.payload()
    for key in ("to", "value", "contract_method"):
        assert key in body
    assert body["contract_method"] == "mint"
| 34.176471
| 79
| 0.708262
| 117
| 1,162
| 6.863248
| 0.25641
| 0.129514
| 0.11208
| 0.042341
| 0.816936
| 0.816936
| 0.816936
| 0.816936
| 0.816936
| 0.816936
| 0
| 0.131775
| 0.190189
| 1,162
| 33
| 80
| 35.212121
| 0.721573
| 0
| 0
| 0.689655
| 0
| 0
| 0.314974
| 0.222031
| 0
| 0
| 0.144578
| 0
| 0.448276
| 1
| 0.068966
| false
| 0
| 0.034483
| 0
| 0.103448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
32c645b5cde2f45442499e722e47181d88f2c778
| 17,717
|
py
|
Python
|
graphpype/pipelines/conmat_to_graph.py
|
EtienneCmb/graphpype
|
f19fdcd8e98660625a53c733ff8e44d60c31bd68
|
[
"BSD-3-Clause"
] | null | null | null |
graphpype/pipelines/conmat_to_graph.py
|
EtienneCmb/graphpype
|
f19fdcd8e98660625a53c733ff8e44d60c31bd68
|
[
"BSD-3-Clause"
] | null | null | null |
graphpype/pipelines/conmat_to_graph.py
|
EtienneCmb/graphpype
|
f19fdcd8e98660625a53c733ff8e44d60c31bd68
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Pipeline to compute graph and modularity with radatools and (possibly if installed) plot with igraph
"""
#import sys
#import time
#import numpy as np
#import scipy.sparse as sp
import nipype.pipeline.engine as pe
#from nipype.utils.misc import show_files
import nipype.interfaces.utility as niu
from graphpype.interfaces.radatools import PrepRada,NetPropRada,CommRada
from graphpype.nodes.modularity import ComputeNetList,ComputeNodeRoles
#import imp
# igraph (and the graphpype plotting wrapper built on it) is optional: the
# pipelines below only wire in plotting nodes when this import succeeds.
try:
    import igraph
    can_plot_igraph = True
    from graphpype.interfaces.plot_igraph.plots import PlotIGraphModules
except ImportError:
    can_plot_igraph = False
def create_pipeline_conmat_to_graph_density( main_path, pipeline_name = "graph_den_pipe", con_den = 1.0,multi = False,mod = True, plot = False, optim_seq = "WS trfr 100"):
    """
    Description:
    Pipeline from connectivity matrices to graph analysis
    Threshold is density based
    Inputs (inputnode):
    * conmat_file
    * coords_file
    * labels_file
    """
    # NOTE(review): this copy of the file lost its indentation; the nesting
    # below (net_prop as a sibling of the `if mod` block) is reconstructed
    # and should be confirmed against the upstream graphpype sources.
    pipeline = pe.Workflow(name= pipeline_name + "_den_" + str(con_den).replace(".","_"))
    pipeline.base_dir = main_path
    inputnode = pe.Node(niu.IdentityInterface(fields=['conmat_file','coords_file','labels_file']),
                        name='inputnode')
    # plotting is silently downgraded to "off" when igraph is unavailable
    if plot==True and can_plot_igraph==False:
        print("warning, no igraph is installed, plot is set back to zero.")
        plot = False
    if multi:
        ################################################ density-based graphs #################################################
        #### net_list: one graph per input matrix (MapNode iterates over file lists)
        compute_net_List = pe.MapNode(interface = ComputeNetList(),name='compute_net_List',iterfield = ["Z_cor_mat_file"])
        compute_net_List.inputs.density = con_den
        pipeline.connect(inputnode, 'conmat_file',compute_net_List, 'Z_cor_mat_file')
        ##### radatools ################################################################
        ### prepare net_list for radatools processing
        prep_rada = pe.MapNode(interface = PrepRada(),name='prep_rada',iterfield = ["net_List_file"])
        prep_rada.inputs.network_type = "U"
        pipeline.connect(compute_net_List, 'net_List_file', prep_rada, 'net_List_file')
        if mod == True:
            ### compute community with radatools
            community_rada = pe.MapNode(interface = CommRada(), name='community_rada',iterfield = ["Pajek_net_file"])
            community_rada.inputs.optim_seq = optim_seq
            pipeline.connect( prep_rada, 'Pajek_net_file',community_rada,'Pajek_net_file')
            ### node roles
            node_roles = pe.MapNode(interface = ComputeNodeRoles(role_type = "4roles"), name='node_roles', iterfield = ['Pajek_net_file','rada_lol_file'])
            pipeline.connect( prep_rada, 'Pajek_net_file',node_roles,'Pajek_net_file')
            pipeline.connect( community_rada, 'rada_lol_file',node_roles,'rada_lol_file')
            if plot == True :
                #### plot_igraph_modules_rada
                plot_igraph_modules_rada = pe.MapNode(interface = PlotIGraphModules(),name='plot_igraph_modules_rada',iterfield = ['Pajek_net_file','rada_lol_file','node_roles_file'])
                pipeline.connect(prep_rada, 'Pajek_net_file',plot_igraph_modules_rada,'Pajek_net_file')
                pipeline.connect(community_rada, 'rada_lol_file',plot_igraph_modules_rada,'rada_lol_file')
                pipeline.connect(node_roles, 'node_roles_file',plot_igraph_modules_rada,'node_roles_file')
                pipeline.connect(inputnode,'labels_file',plot_igraph_modules_rada,'labels_file')
        ############ compute network properties with rada (independent of mod/plot)
        net_prop = pe.MapNode(interface = NetPropRada(optim_seq = "A"), name = 'net_prop',iterfield = ["Pajek_net_file"])
        #net_prop.inputs.radatools_path = radatools_path
        pipeline.connect(prep_rada, 'Pajek_net_file',net_prop,'Pajek_net_file')
    else:
        ################################################ density-based graphs
        #### net_list: single-graph variant (plain Nodes instead of MapNodes)
        compute_net_List = pe.Node(interface = ComputeNetList(),name='compute_net_List')
        compute_net_List.inputs.density = con_den
        pipeline.connect(inputnode, 'conmat_file',compute_net_List, 'Z_cor_mat_file')
        ##### radatools ################################################################
        ### prepare net_list for radatools processing
        prep_rada = pe.Node(interface = PrepRada(),name='prep_rada')
        prep_rada.inputs.network_type = "U"
        pipeline.connect(compute_net_List, 'net_List_file', prep_rada, 'net_List_file')
        if mod == True:
            ### compute community with radatools
            community_rada = pe.Node(interface = CommRada(), name='community_rada')
            community_rada.inputs.optim_seq = optim_seq
            pipeline.connect( prep_rada, 'Pajek_net_file',community_rada,'Pajek_net_file')
            ### node roles
            node_roles = pe.Node(interface = ComputeNodeRoles(role_type = "4roles"), name='node_roles')
            pipeline.connect( prep_rada, 'Pajek_net_file',node_roles,'Pajek_net_file')
            pipeline.connect( community_rada, 'rada_lol_file',node_roles,'rada_lol_file')
            if plot == True:
                #### plot_igraph_modules_rada
                plot_igraph_modules_rada = pe.Node(interface = PlotIGraphModules(),name='plot_igraph_modules_rada')
                pipeline.connect(prep_rada, 'Pajek_net_file',plot_igraph_modules_rada,'Pajek_net_file')
                pipeline.connect(community_rada, 'rada_lol_file',plot_igraph_modules_rada,'rada_lol_file')
                pipeline.connect(node_roles, 'node_roles_file',plot_igraph_modules_rada,'node_roles_file')
                pipeline.connect(inputnode,'coords_file',plot_igraph_modules_rada,'coords_file')
                pipeline.connect(inputnode,'labels_file',plot_igraph_modules_rada,'labels_file')
        ############ compute network properties with rada
        net_prop = pe.Node(interface = NetPropRada(optim_seq = "A"), name = 'net_prop')
        #net_prop.inputs.radatools_path = radatools_path
        pipeline.connect(prep_rada, 'Pajek_net_file',net_prop,'Pajek_net_file')
    return pipeline
def create_pipeline_conmat_to_graph_threshold(main_path,pipeline_name="graph_thr_pipe",con_thr = 1.0,multi = False,mod = True, plot = True, optim_seq = "WS trfr 100"):
    """
    Description:
    Pipeline from connectivity matrices to graph analysis
    Threshold is value based (con_thr)
    Inputs (inputnode):
    * conmat_file
    * coords_file
    * labels_file
    !Warning, need to be checked...
    !Warning, should be merged with previous function create_pipeline_conmat_to_graph_density
    """
    # NOTE(review): indentation reconstructed from a whitespace-stripped copy;
    # confirm branch nesting against the upstream graphpype sources.
    pipeline = pe.Workflow(name=pipeline_name)
    pipeline.base_dir = main_path
    inputnode = pe.Node(niu.IdentityInterface(fields=['conmat_file','coords_file','labels_file']), name='inputnode')
    # plotting is silently downgraded to "off" when igraph is unavailable
    if plot==True and can_plot_igraph==False:
        plot = False
    if multi == False:
        ################################################ density-based graphs
        #### net_list: threshold is value-based here (cf. density-based sibling)
        compute_net_List = pe.Node(interface = ComputeNetList(),name='compute_net_List')
        compute_net_List.inputs.threshold = con_thr
        #compute_net_List.inputs.density = None
        pipeline.connect(inputnode, 'conmat_file',compute_net_List, 'Z_cor_mat_file')
        ##### radatools ################################################################
        ### prepare net_list for radatools processing
        # NOTE(review): iterfield is passed to plain pe.Node here and below --
        # it is only meaningful for MapNode; also network_type is not set in
        # this branch, unlike every other branch. Both look like copy/paste
        # leftovers from the MapNode variant -- confirm before relying on this.
        prep_rada = pe.Node(interface = PrepRada(),name='prep_rada',iterfield = ["net_List_file"])
        pipeline.connect(compute_net_List, 'net_List_file', prep_rada, 'net_List_file')
        if mod == True:
            ### compute community with radatools
            community_rada = pe.Node(interface = CommRada(), name='community_rada',iterfield = ["Pajek_net_file"])
            community_rada.inputs.optim_seq = optim_seq
            pipeline.connect( prep_rada, 'Pajek_net_file',community_rada,'Pajek_net_file')
            if plot == True:
                #### plot_igraph_modules_rada (no node_roles in this branch)
                plot_igraph_modules_rada = pe.Node(interface = PlotIGraphModules(),name='plot_igraph_modules_rada',iterfield = ['Pajek_net_file','rada_lol_file'])
                pipeline.connect(prep_rada, 'Pajek_net_file',plot_igraph_modules_rada,'Pajek_net_file')
                pipeline.connect(community_rada, 'rada_lol_file',plot_igraph_modules_rada,'rada_lol_file')
        ############ compute network properties with rada
        net_prop = pe.Node(interface = NetPropRada(optim_seq = "A"), name = 'net_prop')
        pipeline.connect(prep_rada, 'Pajek_net_file',net_prop,'Pajek_net_file')
    else:
        ################################################ density-based graphs
        #### net_list: one graph per input matrix (MapNode)
        compute_net_List = pe.MapNode(interface = ComputeNetList(),name='compute_net_List',iterfield = ["Z_cor_mat_file"])
        compute_net_List.inputs.threshold = con_thr
        pipeline.connect(inputnode, 'conmat_file',compute_net_List, 'Z_cor_mat_file')
        ##### radatools ################################################################
        ### prepare net_list for radatools processing
        prep_rada = pe.MapNode(interface = PrepRada(),name='prep_rada',iterfield = ["net_List_file"])
        prep_rada.inputs.network_type = "U"
        pipeline.connect(compute_net_List, 'net_List_file', prep_rada, 'net_List_file')
        if mod == True:
            ### compute community with radatools
            community_rada = pe.MapNode(interface = CommRada(), name='community_rada',iterfield = ["Pajek_net_file"])
            community_rada.inputs.optim_seq = optim_seq
            pipeline.connect( prep_rada, 'Pajek_net_file',community_rada,'Pajek_net_file')
            ### node roles
            node_roles = pe.MapNode(interface = ComputeNodeRoles(role_type = "4roles"), name='node_roles', iterfield = ['Pajek_net_file','rada_lol_file'])
            pipeline.connect( prep_rada, 'Pajek_net_file',node_roles,'Pajek_net_file')
            pipeline.connect( community_rada, 'rada_lol_file',node_roles,'rada_lol_file')
            if plot == True :
                #### plot_igraph_modules_rada
                plot_igraph_modules_rada = pe.MapNode(interface = PlotIGraphModules(),name='plot_igraph_modules_rada',iterfield = ['Pajek_net_file','rada_lol_file','node_roles_file'])
                pipeline.connect(prep_rada, 'Pajek_net_file',plot_igraph_modules_rada,'Pajek_net_file')
                pipeline.connect(community_rada, 'rada_lol_file',plot_igraph_modules_rada,'rada_lol_file')
                pipeline.connect(node_roles, 'node_roles_file',plot_igraph_modules_rada,'node_roles_file')
                pipeline.connect(inputnode,'labels_file',plot_igraph_modules_rada,'labels_file')
        ############ compute network properties with rada
        net_prop = pe.MapNode(interface = NetPropRada(optim_seq = "A"), name = 'net_prop',iterfield = ["Pajek_net_file"])
        #net_prop.inputs.radatools_path = radatools_path
        pipeline.connect(prep_rada, 'Pajek_net_file',net_prop,'Pajek_net_file')
    return pipeline
def create_pipeline_net_list_to_graph( main_path, pipeline_name = "graph_net_pipe", multi = False,mod = True, plot = False, optim_seq = "WS trfr 100"):
    """
    Description:
    Pipeline from net_List (txt file) to graph analysis
    Inputs (inputnode):
    * net_List_file
    * coords_file
    * labels_file
    Could be used in the previous functions (create_pipeline_conmat_to_graph_density and create_pipeline_conmat_to_graph_threshold)
    """
    pipeline = pe.Workflow(name= pipeline_name)
    pipeline.base_dir = main_path
    inputnode = pe.Node(niu.IdentityInterface(fields=['net_List_file','coords_file','labels_file']),
                        name='inputnode')
    # plotting is silently downgraded to "off" when igraph is unavailable
    if plot==True and can_plot_igraph==False:
        plot = False
    if multi == False:
        ##### radatools ################################################################
        ### prepare net_list for radatools processing
        prep_rada = pe.Node(interface = PrepRada(),name='prep_rada')
        prep_rada.inputs.network_type = "U"
        pipeline.connect(inputnode, 'net_List_file', prep_rada, 'net_List_file')
        if mod == True:
            ### compute community with radatools
            community_rada = pe.Node(interface = CommRada(), name='community_rada')
            community_rada.inputs.optim_seq = optim_seq
            pipeline.connect( prep_rada, 'Pajek_net_file',community_rada,'Pajek_net_file')
            ### node roles
            node_roles = pe.Node(interface = ComputeNodeRoles(role_type = "4roles"), name='node_roles')
            pipeline.connect( prep_rada, 'Pajek_net_file',node_roles,'Pajek_net_file')
            pipeline.connect( community_rada, 'rada_lol_file',node_roles,'rada_lol_file')
            if plot == True:
                #### plot_igraph_modules_rada
                plot_igraph_modules_rada = pe.Node(interface = PlotIGraphModules(),name='plot_igraph_modules_rada')
                pipeline.connect(prep_rada, 'Pajek_net_file',plot_igraph_modules_rada,'Pajek_net_file')
                pipeline.connect(community_rada, 'rada_lol_file',plot_igraph_modules_rada,'rada_lol_file')
                pipeline.connect(node_roles, 'node_roles_file',plot_igraph_modules_rada,'node_roles_file')
                pipeline.connect(inputnode,'coords_file',plot_igraph_modules_rada,'coords_file')
                pipeline.connect(inputnode,'labels_file',plot_igraph_modules_rada,'labels_file')
        ############ compute network properties with rada
        net_prop = pe.Node(interface = NetPropRada(optim_seq = "A"), name = 'net_prop')
        #net_prop.inputs.radatools_path = radatools_path
        pipeline.connect(prep_rada, 'Pajek_net_file',net_prop,'Pajek_net_file')
    else:
        ##### radatools ################################################################
        ### prepare net_list for radatools processing
        prep_rada = pe.MapNode(interface = PrepRada(),name='prep_rada',iterfield = ["net_List_file"])
        prep_rada.inputs.network_type = "U"
        # BUGFIX: this branch previously connected from an undefined
        # compute_net_List node (NameError when multi=True); in this pipeline
        # the net lists come straight from the inputnode.
        pipeline.connect(inputnode, 'net_List_file', prep_rada, 'net_List_file')
        if mod == True:
            ### compute community with radatools
            community_rada = pe.MapNode(interface = CommRada(), name='community_rada',iterfield = ["Pajek_net_file"])
            # Set the optimisation sequence like every other pipeline in this
            # module (it was previously left commented out, silently ignoring
            # the optim_seq argument in this branch).
            community_rada.inputs.optim_seq = optim_seq
            pipeline.connect( prep_rada, 'Pajek_net_file',community_rada,'Pajek_net_file')
            ### node roles
            node_roles = pe.MapNode(interface = ComputeNodeRoles(role_type = "4roles"), name='node_roles', iterfield = ['Pajek_net_file','rada_lol_file'])
            pipeline.connect( prep_rada, 'Pajek_net_file',node_roles,'Pajek_net_file')
            pipeline.connect( community_rada, 'rada_lol_file',node_roles,'rada_lol_file')
            if plot == True :
                #### plot_igraph_modules_rada
                plot_igraph_modules_rada = pe.MapNode(interface = PlotIGraphModules(),name='plot_igraph_modules_rada',iterfield = ['Pajek_net_file','rada_lol_file','node_roles_file'])
                pipeline.connect(prep_rada, 'Pajek_net_file',plot_igraph_modules_rada,'Pajek_net_file')
                pipeline.connect(community_rada, 'rada_lol_file',plot_igraph_modules_rada,'rada_lol_file')
                pipeline.connect(node_roles, 'node_roles_file',plot_igraph_modules_rada,'node_roles_file')
        ############ compute network properties with rada
        net_prop = pe.MapNode(interface = NetPropRada(optim_seq = "A"), name = 'net_prop',iterfield = ["Pajek_net_file"])
        #net_prop.inputs.radatools_path = radatools_path
        pipeline.connect(prep_rada, 'Pajek_net_file',net_prop,'Pajek_net_file')
    return pipeline
| 42.794686
| 183
| 0.59474
| 1,909
| 17,717
| 5.146674
| 0.074908
| 0.048855
| 0.073282
| 0.087634
| 0.913791
| 0.90341
| 0.883359
| 0.873893
| 0.864733
| 0.862697
| 0
| 0.001461
| 0.26579
| 17,717
| 413
| 184
| 42.898305
| 0.753844
| 0.136987
| 0
| 0.850649
| 0
| 0
| 0.186552
| 0.010203
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019481
| false
| 0
| 0.045455
| 0
| 0.084416
| 0.006494
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0896cc74a866534c0e6552fdc753f823e35516d7
| 2,290
|
py
|
Python
|
src/test/test_present_perfect_indicitive.py
|
shrutiichandra/spanish-conjugator
|
2ebf41b92c14c3e47a873c52fdf4ce1d17bff5e0
|
[
"MIT"
] | null | null | null |
src/test/test_present_perfect_indicitive.py
|
shrutiichandra/spanish-conjugator
|
2ebf41b92c14c3e47a873c52fdf4ce1d17bff5e0
|
[
"MIT"
] | null | null | null |
src/test/test_present_perfect_indicitive.py
|
shrutiichandra/spanish-conjugator
|
2ebf41b92c14c3e47a873c52fdf4ce1d17bff5e0
|
[
"MIT"
] | null | null | null |
# -*- coding: iso-8859-15 -*-
import spanishconjugator
from spanishconjugator.SpanishConjugator import Conjugator
# ------------------------------ Present Perfect Compound Tense ------------------------------- #
def test_present_perfect_indicative_yo_ar():
    """yo + regular -ar verb."""
    assert Conjugator().conjugate('hablar','present_perfect','indicative','yo') == "he hablado"

def test_present_perfect_indicative_tu_ar():
    """tu + regular -ar verb."""
    assert Conjugator().conjugate('hablar','present_perfect','indicative','tu') == "has hablado"

def test_present_perfect_indicative_usted_ar():
    """usted + regular -ar verb."""
    assert Conjugator().conjugate('hablar','present_perfect','indicative','usted') == "ha hablado"

def test_present_perfect_indicative_nosotros_ar():
    """nosotros + regular -ar verb."""
    assert Conjugator().conjugate('hablar','present_perfect','indicative','nosotros') == "hemos hablado"

def test_present_perfect_indicative_vosotros_ar():
    """vosotros + regular -ar verb."""
    assert Conjugator().conjugate('hablar','present_perfect','indicative','vosotros') == "habéis hablado"

def test_present_perfect_indicative_ustedes_ar():
    """ustedes + regular -ar verb."""
    assert Conjugator().conjugate('hablar','present_perfect','indicative','ustedes') == "han hablado"

def test_present_perfect_indicative_yo_er():
    """yo + regular -er verb."""
    assert Conjugator().conjugate('beber','present_perfect','indicative','yo') == "he bebido"

def test_present_perfect_indicative_tu_ir():
    """tu + regular -ir verb."""
    assert Conjugator().conjugate('vivir','present_perfect','indicative','tu') == "has vivido"

def test_present_perfect_indicative_usted_er():
    """usted + regular -er verb."""
    assert Conjugator().conjugate('beber','present_perfect','indicative','usted') == "ha bebido"

def test_present_perfect_indicative_nosotros_ir():
    """nosotros + regular -ir verb."""
    assert Conjugator().conjugate('vivir','present_perfect','indicative','nosotros') == "hemos vivido"

def test_present_perfect_indicative_vosotros_er():
    """vosotros + regular -er verb."""
    assert Conjugator().conjugate('beber','present_perfect','indicative','vosotros') == "habéis bebido"

def test_present_perfect_indicative_ustedes_ir():
    """ustedes + regular -ir verb."""
    assert Conjugator().conjugate('vivir','present_perfect','indicative','ustedes') == "han vivido"
| 43.207547
| 97
| 0.734061
| 243
| 2,290
| 6.621399
| 0.160494
| 0.217526
| 0.357986
| 0.156619
| 0.825357
| 0.811684
| 0.765693
| 0.765693
| 0.459913
| 0.459913
| 0
| 0.002937
| 0.10786
| 2,290
| 53
| 98
| 43.207547
| 0.78463
| 0.052838
| 0
| 0
| 0
| 0
| 0.259584
| 0
| 0
| 0
| 0
| 0
| 0.315789
| 1
| 0.315789
| false
| 0
| 0.052632
| 0
| 0.368421
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
08a2812da3d71ede8fea586985ec1481552f5b8a
| 5,488
|
py
|
Python
|
src/FFEAT/test/strategies/initialization/UniformTest.py
|
PatrikValkovic/MasterThesis
|
6e9f3b186541db6c8395ebc96ace7289d01c805b
|
[
"MIT"
] | null | null | null |
src/FFEAT/test/strategies/initialization/UniformTest.py
|
PatrikValkovic/MasterThesis
|
6e9f3b186541db6c8395ebc96ace7289d01c805b
|
[
"MIT"
] | null | null | null |
src/FFEAT/test/strategies/initialization/UniformTest.py
|
PatrikValkovic/MasterThesis
|
6e9f3b186541db6c8395ebc96ace7289d01c805b
|
[
"MIT"
] | null | null | null |
###############################
#
# Created by Patrik Valkovic
# 3/12/2021
#
###############################
import unittest
import torch as t
from ffeat.strategies import initialization
class UniformTest(unittest.TestCase):
def test_population_size_match(self):
i = initialization.Uniform(51, -2.0, 2.0, 312)
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 312))
def test_population_dimension_match(self):
i = initialization.Uniform(51, -2.0, 2.0, (8,7))
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 8, 7))
def test_dimension_from_min(self):
i = initialization.Uniform(51, t.full((312,), -2.0), 2.0)
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 312))
def test_dimension_from_min_multidimensional(self):
i = initialization.Uniform(51, t.full((8,7), -2.0), 2.0)
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 8, 7))
def test_dimension_from_max(self):
i = initialization.Uniform(51, -2.0, t.full((312,), 2.0))
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 312))
def test_dimension_from_max_multidimensional(self):
i = initialization.Uniform(51, -2.0, t.full((8,7), 2.0))
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 8, 7))
def test_missing_dimension(self):
with self.assertRaises(ValueError):
initialization.Uniform(51, -2.0, 2.0)
def test_max_min_dimensions_not_match(self):
with self.assertRaises(ValueError):
initialization.Uniform(51, t.full((4,5), -2.0), t.full((8,7), 2.0))
def test_min_dimension_not_match(self):
with self.assertRaises(ValueError):
initialization.Uniform(51, t.full((4,5), -2.0), 2.0, (4,6))
def test_max_dimension_not_match(self):
with self.assertRaises(ValueError):
initialization.Uniform(51, -2.0, t.full((4,5), 2.0), (4,6))
def test_max_smaller_than_min(self):
with self.assertRaises(ValueError):
initialization.Uniform(51, 2.0, -2.0, (4,6))
def test_min_int(self):
i = initialization.Uniform(51, -2, 2.0, (4,6))
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 4, 6))
def test_min_float(self):
i = initialization.Uniform(51, -2, 2.0, (4,6))
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 4, 6))
def test_min_list(self):
i = initialization.Uniform(51, [[-2.0]*6]*4, 2.0)
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 4, 6))
def test_min_tensor(self):
i = initialization.Uniform(51, t.full((4,6), -2.0), 2.0)
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 4, 6))
def test_max_int(self):
i = initialization.Uniform(51, -2.0, 2, (4,6))
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 4, 6))
def test_max_float(self):
i = initialization.Uniform(51, -2.0, 2.0, (4,6))
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 4, 6))
def test_max_list(self):
i = initialization.Uniform(51, -2.0, [[2.0]*6]*4)
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 4, 6))
def test_max_tensor(self):
i = initialization.Uniform(51, -2.0, t.full((4,6), 2.0))
pop, kargs = i()
self.assertEqual(pop[0].shape, (51, 4, 6))
def test_max_works(self):
i = initialization.Uniform(51, -2.0, t.full((4,6), 2.0))
for _ in range(100):
pop, kargs = i()
self.assertTrue(t.all(pop[0] < 2.0))
def test_min_works(self):
    """Repeated sampling never falls below the (inclusive) lower bound."""
    init = initialization.Uniform(51, t.full((4, 6), -2.0), 2.0)
    for _ in range(100):
        population, _ = init()
        self.assertTrue(t.all(population[0] >= -2.0))
def test_shifted(self):
    """An interval not centred on zero is still honoured on both sides."""
    init = initialization.Uniform(51, 3.0, t.full((4, 6), 5.0))
    for _ in range(100):
        population, _ = init()
        self.assertTrue(t.all(population[0] >= 3.0))
        self.assertTrue(t.all(population[0] < 5.0))
def test_float16_type(self):
    """The dtype argument is propagated to the generated population."""
    init = initialization.Uniform(51, 3.0, t.full((4, 6), 5.0), dtype=t.float16)
    population, _ = init()
    self.assertEqual(population[0].dtype, t.float16)
def test_long_type(self):
    """An integral dtype (long) is propagated to the population."""
    init = initialization.Uniform(51, 3.0, t.full((4, 6), 5.0), dtype=t.long)
    population, _ = init()
    self.assertEqual(population[0].dtype, t.long)
def test_int8_type(self):
    """A narrow integral dtype (int8) is propagated to the population."""
    init = initialization.Uniform(51, 3.0, t.full((4, 6), 5.0), dtype=t.int8)
    population, _ = init()
    self.assertEqual(population[0].dtype, t.int8)
@unittest.skipIf(not t.cuda.is_available(), "CUDA not available")
def test_on_cuda(self):
    """An explicit device argument places the population on the GPU."""
    init = initialization.Uniform(51, 3.0, 5.0, (7, 8), device='cuda')
    population, _ = init()
    self.assertEqual(population[0].device, t.device('cuda:0'))
@unittest.skipIf(not t.cuda.is_available(), "CUDA not available")
def test_device_from_min(self):
    """The device is inherited from a CUDA lower-bound tensor."""
    init = initialization.Uniform(51, t.full((7, 8), 3.0, device='cuda'), 5.0)
    population, _ = init()
    self.assertEqual(population[0].device, t.device('cuda:0'))
@unittest.skipIf(not t.cuda.is_available(), "CUDA not available")
def test_device_from_max(self):
    """The device is inherited from a CUDA upper-bound tensor."""
    init = initialization.Uniform(51, 3.0, t.full((7, 8), 5.0, device='cuda'))
    population, _ = init()
    self.assertEqual(population[0].device, t.device('cuda:0'))
# Allow running this test module directly (e.g. `python this_file.py`).
if __name__ == '__main__':
    unittest.main()
| 34.3
| 80
| 0.574344
| 829
| 5,488
| 3.696019
| 0.095296
| 0.026762
| 0.210183
| 0.19517
| 0.885117
| 0.873695
| 0.847911
| 0.814621
| 0.780352
| 0.668407
| 0
| 0.083393
| 0.2418
| 5,488
| 159
| 81
| 34.515723
| 0.652968
| 0.00656
| 0
| 0.475
| 0
| 0
| 0.017084
| 0
| 0
| 0
| 0
| 0
| 0.241667
| 1
| 0.233333
| false
| 0
| 0.025
| 0
| 0.266667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
08c20d80446c0e06a425a2efd1e7ecec5e36c9cc
| 135
|
py
|
Python
|
pywco/__init__.py
|
tyrylu/pywco
|
945aeae7e89c20a4c8ff54f6b496ebfc176aa0e2
|
[
"MIT"
] | null | null | null |
pywco/__init__.py
|
tyrylu/pywco
|
945aeae7e89c20a4c8ff54f6b496ebfc176aa0e2
|
[
"MIT"
] | 3
|
2019-03-05T22:02:21.000Z
|
2019-03-14T20:53:43.000Z
|
pywco/__init__.py
|
tyrylu/pywco
|
945aeae7e89c20a4c8ff54f6b496ebfc176aa0e2
|
[
"MIT"
] | null | null | null |
from .client import Client
from .server import Server
from .communicator import Communicator
#from .communicator import command_handler
| 33.75
| 42
| 0.851852
| 17
| 135
| 6.705882
| 0.411765
| 0.280702
| 0.385965
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 135
| 4
| 42
| 33.75
| 0.95
| 0.303704
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
08ceafdde3c321be56b370ad5eb0bb9fc79d6f64
| 182
|
py
|
Python
|
modules/ranking/models/exp_ranking.py
|
heolin123/funcrowd
|
20167783de208394c09ed0429a5f02ec6dd79c42
|
[
"MIT"
] | null | null | null |
modules/ranking/models/exp_ranking.py
|
heolin123/funcrowd
|
20167783de208394c09ed0429a5f02ec6dd79c42
|
[
"MIT"
] | 11
|
2019-11-12T23:26:45.000Z
|
2021-06-10T17:37:23.000Z
|
modules/ranking/models/exp_ranking.py
|
heolin123/funcrowd
|
20167783de208394c09ed0429a5f02ec6dd79c42
|
[
"MIT"
] | null | null | null |
from modules.ranking.models.ranking import Ranking
from modules.ranking.query.base import EXP_RANKING_BASE_QUERY
class ExpRanking(Ranking):
    """Ranking variant backed by the EXP-specific base SQL query.

    Only the query constant differs; all ranking behaviour is inherited
    from :class:`Ranking`.
    """
    BASE_QUERY = EXP_RANKING_BASE_QUERY
| 26
| 61
| 0.840659
| 26
| 182
| 5.615385
| 0.384615
| 0.226027
| 0.328767
| 0.260274
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104396
| 182
| 6
| 62
| 30.333333
| 0.895706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
08e5e6afdf0ad8a3e4a8219b847340b5ece6f921
| 5,507
|
py
|
Python
|
test/test_mod_contact.py
|
dvkruchinin/python_tr
|
eaa901759e78f6036491556f71f9f19d5830bd98
|
[
"MIT"
] | null | null | null |
test/test_mod_contact.py
|
dvkruchinin/python_tr
|
eaa901759e78f6036491556f71f9f19d5830bd98
|
[
"MIT"
] | null | null | null |
test/test_mod_contact.py
|
dvkruchinin/python_tr
|
eaa901759e78f6036491556f71f9f19d5830bd98
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding:utf-8
"""
Name : test_mod_contact.py
Author : Dmitry Kruchinin
Date : 6/21/2021
Desc:
"""
from model.contact import Contact
from random import randrange
# Fallback fixture: a fully populated contact that the tests create whenever
# the database contains no contact to modify.
create_contact_if_missing = Contact(firstname="firstname", lastname="lastname",
                                    homephone="homephone111", mobilephone="mobilephone222",
                                    workphone="workphone333", secondaryphone="secphone444", address="home",
                                    email="name@home.local", email2="name2@home.local", email3="name3@home.local")
def test_modification_some_contact(app, db, check_ui):
    """Overwrite several fields of a random contact; verify via DB (and UI)."""
    if not db.get_contact_list():
        app.contact.create(create_contact_if_missing)
    contacts_before = db.get_contact_list()
    idx = randrange(len(contacts_before))
    updated = Contact(firstname="newfname",
                      lastname="newlname",
                      address="newhome",
                      mobilephone="8888888888",
                      email="newname@newhome.local")
    updated.id = contacts_before[idx].id
    app.contact.modification_contact_by_id(updated.id, updated)
    contacts_after = db.get_contact_list()
    contacts_before[idx] = updated
    assert contacts_before == contacts_after
    if check_ui:
        ui_contacts = app.contact.get_contact_list()
        assert sorted(contacts_before, key=Contact.id_or_max) == sorted(ui_contacts, key=Contact.id_or_max)
def test_modification_some_contact_first_name(app, db, check_ui):
    """Change only the first name; the last name is carried over unchanged."""
    if not db.get_contact_list():
        app.contact.create(create_contact_if_missing)
    contacts_before = db.get_contact_list()
    idx = randrange(len(contacts_before))
    updated = Contact(firstname="New first name")
    updated.id = contacts_before[idx].id
    updated.lastname = contacts_before[idx].lastname
    app.contact.modification_contact_by_id(updated.id, updated)
    contacts_after = db.get_contact_list()
    contacts_before[idx] = updated
    assert contacts_before == contacts_after
    if check_ui:
        ui_contacts = app.contact.get_contact_list()
        assert sorted(contacts_before, key=Contact.id_or_max) == sorted(ui_contacts, key=Contact.id_or_max)
def test_modification_some_contact_last_name(app, db, check_ui):
    """Change only the last name; the first name is carried over unchanged.

    Fix: the empty-database guard called
    ``app.group.create(Contact(create_contact_if_missing))`` — the *group*
    helper, passing a Contact wrapped in another Contact — while every
    sibling test uses ``app.contact.create(create_contact_if_missing)``.
    """
    if len(db.get_contact_list()) == 0:
        app.contact.create(create_contact_if_missing)
    old_contacts = db.get_contact_list()
    index = randrange(len(old_contacts))
    contact = Contact(lastname="New last name")
    contact.id = old_contacts[index].id
    contact.firstname = old_contacts[index].firstname
    app.contact.modification_contact_by_id(contact.id, contact)
    new_contacts = db.get_contact_list()
    old_contacts[index] = contact
    assert old_contacts == new_contacts
    if check_ui:
        assert sorted(old_contacts, key=Contact.id_or_max) == sorted(app.contact.get_contact_list(),
                                                                     key=Contact.id_or_max)
def test_modification_some_contact_address(app, db, check_ui):
    """Change only the address; both name fields are carried over unchanged.

    Fix: the empty-database guard called
    ``app.group.create(Contact(create_contact_if_missing))`` — the *group*
    helper, passing a Contact wrapped in another Contact — while every
    sibling test uses ``app.contact.create(create_contact_if_missing)``.
    """
    if len(db.get_contact_list()) == 0:
        app.contact.create(create_contact_if_missing)
    old_contacts = db.get_contact_list()
    index = randrange(len(old_contacts))
    contact = Contact(address="New address")
    contact.id = old_contacts[index].id
    contact.firstname = old_contacts[index].firstname
    contact.lastname = old_contacts[index].lastname
    app.contact.modification_contact_by_id(contact.id, contact)
    new_contacts = db.get_contact_list()
    old_contacts[index] = contact
    assert old_contacts == new_contacts
    if check_ui:
        assert sorted(old_contacts, key=Contact.id_or_max) == sorted(app.contact.get_contact_list(),
                                                                     key=Contact.id_or_max)
def test_modification_some_contact_phone(app, db, check_ui):
    """Change only the mobile phone; both name fields are carried over."""
    if not db.get_contact_list():
        app.contact.create(create_contact_if_missing)
    contacts_before = db.get_contact_list()
    idx = randrange(len(contacts_before))
    updated = Contact(mobilephone="7777777777")
    updated.id = contacts_before[idx].id
    updated.firstname = contacts_before[idx].firstname
    updated.lastname = contacts_before[idx].lastname
    app.contact.modification_contact_by_id(updated.id, updated)
    contacts_after = db.get_contact_list()
    contacts_before[idx] = updated
    assert contacts_before == contacts_after
    if check_ui:
        ui_contacts = app.contact.get_contact_list()
        assert sorted(contacts_before, key=Contact.id_or_max) == sorted(ui_contacts, key=Contact.id_or_max)
def test_modification_some_contact_email(app, db, check_ui):
    """Change only the e-mail; both name fields are carried over."""
    if not db.get_contact_list():
        app.contact.create(create_contact_if_missing)
    contacts_before = db.get_contact_list()
    idx = randrange(len(contacts_before))
    updated = Contact(email="name@newemail.local")
    updated.id = contacts_before[idx].id
    updated.firstname = contacts_before[idx].firstname
    updated.lastname = contacts_before[idx].lastname
    app.contact.modification_contact_by_id(updated.id, updated)
    contacts_after = db.get_contact_list()
    contacts_before[idx] = updated
    assert contacts_before == contacts_after
    if check_ui:
        ui_contacts = app.contact.get_contact_list()
        assert sorted(contacts_before, key=Contact.id_or_max) == sorted(ui_contacts, key=Contact.id_or_max)
| 44.056
| 110
| 0.662248
| 680
| 5,507
| 5.060294
| 0.116176
| 0.140657
| 0.097646
| 0.083697
| 0.842778
| 0.83406
| 0.83406
| 0.823307
| 0.813136
| 0.813136
| 0
| 0.011913
| 0.237879
| 5,507
| 124
| 111
| 44.41129
| 0.807958
| 0.021609
| 0
| 0.784314
| 0
| 0
| 0.044246
| 0.003904
| 0
| 0
| 0
| 0
| 0.117647
| 1
| 0.058824
| false
| 0
| 0.019608
| 0
| 0.078431
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3ebeaf0026e19456bface23d892ceb3992af05f1
| 14,344
|
py
|
Python
|
setup.py
|
wallisonalves/fbi
|
bb0db5365df3087261eefcd5fea67ac9017d9f33
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
wallisonalves/fbi
|
bb0db5365df3087261eefcd5fea67ac9017d9f33
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
wallisonalves/fbi
|
bb0db5365df3087261eefcd5fea67ac9017d9f33
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup
# silence pyflakes, __VERSION__ is properly assigned below...
__VERSION__ = '3.0'  # package version; aliased below for use in setup()
PROGRAM_VERSION = __VERSION__
def datafilelist(installbase, sourcebase):
    """Mirror *sourcebase*'s directory tree under *installbase*.

    Returns a setuptools ``data_files``-style list: one
    ``(install_dir, [source_file, ...])`` tuple per directory found by
    ``os.walk`` (directories with no files yield an empty file list).
    """
    entries = []
    for dirpath, _dirnames, filenames in os.walk(sourcebase):
        sources = [os.path.join(dirpath, name) for name in filenames]
        entries.append((dirpath.replace(sourcebase, installbase), sources))
    return entries
def _one_per_file(subdir, srcdir, names):
    """One ``(install-dir, [single source file])`` tuple per name.

    Reproduces the original hand-written layout exactly: every file got its
    own tuple, all pointing at the same install directory under sys.prefix.
    """
    dest = '{prefix}/{subdir}'.format(prefix=sys.prefix, subdir=subdir)
    return [(dest, ['{0}/{1}'.format(srcdir, name)]) for name in names]


# Top-level UI assets and installer modules shipped to lib/fbi.
_LIB_FILES = [
    'install.png', 'logo.png', 'create_cfg.py', 'end.py', 'error.py',
    'fbiWindow.py', 'freebsd-style.css', 'install.py', 'installType.py',
    'keyboard.py', 'language.py', 'partition.py', 'partition_handler.py',
    'root.py', 'slides.py', 'timezone.py', 'use_ufs.py', 'use_zfs.py',
    'addUser.py',
]

# Shell helpers the installer shells out to for hardware/system queries.
_BACKEND_QUERY = [
    'detect-laptop.sh', 'detect-nics.sh', 'detect-sheme.sh', 'detect-vmware.sh',
    'detect-wifi.sh', 'disk-info.sh', 'disk-label.sh', 'disk-list.sh',
    'disk-part.sh', 'enable-net.sh', 'list-components.sh',
    'list-rsync-backups.sh', 'list-tzones.sh', 'query-langs.sh',
    'send-logs.sh', 'setup-ssh-keys.sh', 'sys-mem.sh', 'test-live.sh',
    'test-netup.sh', 'update-part-list.sh', 'xkeyboard-layouts.sh',
    'xkeyboard-models.sh', 'xkeyboard-variants.sh',
]

# Per-layout keyboard variant data files (XKB layout codes).
_KEYBOARD_VARIANTS = [
    'af', 'am', 'ara', 'at', 'az', 'ba', 'bd', 'be', 'bg', 'br', 'brai',
    'by', 'ca', 'ch', 'cn', 'cz', 'de', 'dk', 'ee', 'epo', 'es', 'fi',
    'fo', 'fr', 'gb', 'ge', 'gh', 'gr', 'hr', 'hu', 'ie', 'il', 'in',
    'iq', 'ir', 'is', 'it', 'jp', 'ke', 'kg', 'kz', 'latam', 'lk', 'lt',
    'lv', 'ma', 'me', 'mk', 'ml', 'mt', 'ng', 'nl', 'no', 'ph', 'pk',
    'pl', 'pt', 'ro', 'rs', 'ru', 'se', 'si', 'sk', 'sy', 'th', 'tj',
    'tm', 'tr', 'ua', 'us', 'uz',
]

# Slideshow images displayed during installation.
_SLIDE_IMAGES = [
    'browser.png', 'email.png', 'help.png', 'F-logo.png', 'music.png',
    'office.png', 'photo.png', 'social.png', 'software.png',
    'customize.png', 'welcome.png',
]

# Timezone city lists, one file per continent/region.
_TZ_CITIES = [
    'Africa', 'America', 'Antarctica', 'Arctic', 'Asia', 'Atlantic',
    'Australia', 'Europe', 'Indian', 'Pacific',
]

# Build the data_files manifest. Content and ordering are identical to the
# previous hand-duplicated 140-tuple literal; only the construction changed.
data_files = [
    ('{prefix}/share/applications'.format(prefix=sys.prefix), ['src/fbi.desktop']),
]
data_files += _one_per_file('lib/fbi', 'src', _LIB_FILES)
data_files += _one_per_file('lib/fbi/backend-query', 'src/backend-query', _BACKEND_QUERY)
data_files += _one_per_file('lib/fbi/keyboard', 'src/keyboard', ['layout', 'model'])
data_files += _one_per_file('lib/fbi/keyboard/variant', 'src/keyboard/variant', _KEYBOARD_VARIANTS)
data_files += _one_per_file('lib/fbi/slide-images/freebsd', 'src/slide-images/freebsd', _SLIDE_IMAGES)
data_files += _one_per_file('lib/fbi/timezone', 'src/timezone', ['continent'])
data_files += _one_per_file('lib/fbi/timezone/city', 'src/timezone/city', _TZ_CITIES)
# Compiled gettext catalogs produced by the build step.
data_files.extend(datafilelist('{prefix}/share/locale'.format(prefix=sys.prefix), 'build/mo'))

setup(name="fbi",
      version=PROGRAM_VERSION,
      description="FBI is the FreeBSD front end user interface for pc-sysinstall",
      license='BSD',
      author='Eric Turgeon',
      # Fix: original read 'https://github/...' — missing the '.com' host part.
      url='https://github.com/wallisonalves/fbi/',
      package_dir={'': '.'},
      data_files=data_files,
      # install_requires = [ 'setuptools', ],
      scripts=['fbi'],)
| 81.965714
| 116
| 0.686768
| 1,932
| 14,344
| 5.086439
| 0.117495
| 0.21675
| 0.212171
| 0.297039
| 0.872596
| 0.86934
| 0.864455
| 0.864455
| 0.864455
| 0.80523
| 0
| 0.000225
| 0.071389
| 14,344
| 174
| 117
| 82.436782
| 0.737538
| 0.00976
| 0
| 0
| 0
| 0
| 0.541658
| 0.490668
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006135
| false
| 0
| 0.018405
| 0
| 0.030675
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3ef076bd1f4dc85adf7ca89b0cfaca513c491079
| 3,986
|
py
|
Python
|
TeamProjDjango/src/HomePage/models.py
|
chynggi/RuningMan
|
cfb9c2e7ed3afb1edd56ed16ae09ca38ead059a1
|
[
"Apache-2.0"
] | 1
|
2019-12-09T03:02:57.000Z
|
2019-12-09T03:02:57.000Z
|
TeamProjDjango/src/HomePage/models.py
|
chynggi/RuningMan
|
cfb9c2e7ed3afb1edd56ed16ae09ca38ead059a1
|
[
"Apache-2.0"
] | 3
|
2021-12-18T18:21:17.000Z
|
2022-01-04T16:35:53.000Z
|
TeamProjDjango/src/HomePage/models.py
|
chynggi/LearningMan
|
cfb9c2e7ed3afb1edd56ed16ae09ca38ead059a1
|
[
"Apache-2.0"
] | null | null | null |
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from __future__ import unicode_literals
from django.db import models
class Frboard(models.Model):
    """Row of the pre-existing `frboard` table (auto-generated, unmanaged)."""
    # NOTE(review): float primary key comes from the legacy schema — confirm.
    no = models.FloatField(primary_key=True, serialize=False)
    title = models.CharField(max_length=100, blank=True, null=True)
    content = models.CharField(max_length=3000, blank=True, null=True)
    # `id` is a FK to Buser here, not Django's usual auto pk (pk is `no`).
    id = models.ForeignKey('Buser', models.DO_NOTHING, db_column='id', blank=True, null=True)
    xdate = models.DateField(blank=True, null=True,auto_now_add=True)  # set once on insert

    class Meta:
        managed = False  # table is created/maintained outside Django
        db_table = 'frboard'
class Ssboard(models.Model):
    """Row of the pre-existing `ssboard` table (auto-generated, unmanaged)."""
    # NOTE(review): float primary key comes from the legacy schema — confirm.
    no = models.FloatField(primary_key=True, serialize=False)
    title = models.CharField(max_length=100, blank=True, null=True)
    content = models.CharField(max_length=3000, blank=True, null=True)
    # `id` is a FK to Buser here, not Django's usual auto pk (pk is `no`).
    id = models.ForeignKey('Buser', models.DO_NOTHING, db_column='id', blank=True, null=True)
    xdate = models.DateField(blank=True, null=True,auto_now_add=True)  # set once on insert

    class Meta:
        managed = False  # table is created/maintained outside Django
        db_table = 'ssboard'
class Oopboard(models.Model):
    """Row of the pre-existing `oopboard` table (auto-generated, unmanaged)."""
    # NOTE(review): float primary key comes from the legacy schema — confirm.
    no = models.FloatField(primary_key=True, serialize=False)
    title = models.CharField(max_length=100, blank=True, null=True)
    content = models.CharField(max_length=3000, blank=True, null=True)
    # `id` is a FK to Buser here, not Django's usual auto pk (pk is `no`).
    id = models.ForeignKey('Buser', models.DO_NOTHING, db_column='id', blank=True, null=True)
    xdate = models.DateField(blank=True, null=True,auto_now_add=True)  # set once on insert

    class Meta:
        managed = False  # table is created/maintained outside Django
        db_table = 'oopboard'
class Daboard(models.Model):
    """Row of the pre-existing `daboard` table (auto-generated, unmanaged)."""
    # NOTE(review): float primary key comes from the legacy schema — confirm.
    no = models.FloatField(primary_key=True, serialize=False)
    title = models.CharField(max_length=100, blank=True, null=True)
    content = models.CharField(max_length=3000, blank=True, null=True)
    # `id` is a FK to Buser here, not Django's usual auto pk (pk is `no`).
    id = models.ForeignKey('Buser', models.DO_NOTHING, db_column='id', blank=True, null=True)
    xdate = models.DateField(blank=True, null=True,auto_now_add=True)  # set once on insert

    class Meta:
        managed = False  # table is created/maintained outside Django
        db_table = 'daboard'
class Dbmsboard(models.Model):
    """Row of the pre-existing `dbmsboard` table (auto-generated, unmanaged)."""
    # NOTE(review): float primary key comes from the legacy schema — confirm.
    no = models.FloatField(primary_key=True, serialize=False)
    title = models.CharField(max_length=100, blank=True, null=True)
    content = models.CharField(max_length=3000, blank=True, null=True)
    # `id` is a FK to Buser here, not Django's usual auto pk (pk is `no`).
    id = models.ForeignKey('Buser', models.DO_NOTHING, db_column='id', blank=True, null=True)
    xdate = models.DateField(blank=True, null=True,auto_now_add=True)  # set once on insert

    class Meta:
        managed = False  # table is created/maintained outside Django
        db_table = 'dbmsboard'
class Board(models.Model):
    """Row of the pre-existing `board` table (auto-generated, unmanaged)."""
    # NOTE(review): float primary key comes from the legacy schema — confirm.
    no = models.FloatField(primary_key=True, serialize=False)
    title = models.CharField(max_length=100, blank=True, null=True)
    content = models.CharField(max_length=3000, blank=True, null=True)
    # `id` is a FK to Buser here, not Django's usual auto pk (pk is `no`).
    id = models.ForeignKey('Buser', models.DO_NOTHING, db_column='id', blank=True, null=True)
    xdate = models.DateField(blank=True, null=True,auto_now_add=True)  # set once on insert

    class Meta:
        managed = False  # table is created/maintained outside Django
        db_table = 'board'
class Buser(models.Model):
    """User account row of the pre-existing `buser` table (unmanaged).

    Referenced as the FK target by the board models in this module.
    """
    id = models.CharField(primary_key=True, max_length=50)  # login id, also the pk
    pw = models.CharField(max_length=150, blank=True, null=True)
    name = models.CharField(max_length=80, blank=True, null=True)
    phone = models.CharField(max_length=13, blank=True, null=True)
    xdate = models.DateField(blank=True, null=True,auto_now_add=True)  # set once on insert

    class Meta:
        managed = False  # table is created/maintained outside Django
        db_table = 'buser'
| 41.957895
| 105
| 0.672353
| 534
| 3,986
| 4.898876
| 0.183521
| 0.09633
| 0.139144
| 0.181957
| 0.729358
| 0.729358
| 0.729358
| 0.729358
| 0.729358
| 0.729358
| 0
| 0.016372
| 0.218515
| 3,986
| 94
| 106
| 42.404255
| 0.823435
| 0.113648
| 0
| 0.692308
| 1
| 0
| 0.026231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030769
| 0
| 0.784615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
410f30db4acd77bc7b306c7c51de62c510206e37
| 8,051
|
py
|
Python
|
msgraph-cli-extensions/beta/financials_beta/azext_financials_beta/generated/_client_factory.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
msgraph-cli-extensions/beta/financials_beta/azext_financials_beta/generated/_client_factory.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
msgraph-cli-extensions/beta/financials_beta/azext_financials_beta/generated/_client_factory.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
def cf_financials_beta_cl(cli_ctx, *_):
    """Instantiate the Financials SDK client for the current CLI context.

    Auto-generated (AutoRest); ``*_`` swallows the extra positional
    arguments the command loader passes to client factories.
    """
    from msgraph.cli.core.commands.client_factory import get_mgmt_service_client
    from azext_financials_beta.vendored_sdks.financials import Financials
    # NOTE(review): subscription/base-url binding is disabled — presumably
    # because Graph endpoints are not ARM-scoped; confirm against the SDK.
    return get_mgmt_service_client(cli_ctx,
                                   Financials,
                                   subscription_bound=False,
                                   base_url_bound=False)
def cf_financial_financial(cli_ctx, *_):
    """Operations group: financials/financials."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_financials


def cf_financial(cli_ctx, *_):
    """Operations group: financials."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials


def cf_financial_company(cli_ctx, *_):
    """Operations group: financials/companies."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies


def cf_financial_company_customer_payment_journal(cli_ctx, *_):
    """Operations group: companies/customerPaymentJournals."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_customer_payment_journals


def cf_financial_company_customer_payment_journal_customer_payment(cli_ctx, *_):
    """Operations group: customerPaymentJournals/customerPayments."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_customer_payment_journals_customer_payments


def cf_financial_company_customer_payment_journal_customer_payment_customer(cli_ctx, *_):
    """Operations group: customerPaymentJournals/customerPayments/customer."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_customer_payment_journals_customer_payments_customer


def cf_financial_company_customer_payment(cli_ctx, *_):
    """Operations group: companies/customerPayments."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_customer_payments


def cf_financial_company_customer_payment_customer(cli_ctx, *_):
    """Operations group: companies/customerPayments/customer."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_customer_payments_customer


def cf_financial_company_customer(cli_ctx, *_):
    """Operations group: companies/customers."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_customers


def cf_financial_company_dimension(cli_ctx, *_):
    """Operations group: companies/dimensions."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_dimensions


def cf_financial_company_employee(cli_ctx, *_):
    """Operations group: companies/employees."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_employees


def cf_financial_company_general_ledger_entry(cli_ctx, *_):
    """Operations group: companies/generalLedgerEntries."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_general_ledger_entries


def cf_financial_company_item(cli_ctx, *_):
    """Operations group: companies/items."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_items


def cf_financial_company_journal_line(cli_ctx, *_):
    """Operations group: companies/journalLines."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_journal_lines


def cf_financial_company_journal(cli_ctx, *_):
    """Operations group: companies/journals."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_journals


def cf_financial_company_journal_journal_line(cli_ctx, *_):
    """Operations group: companies/journals/journalLines."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_journals_journal_lines
def cf_financial_company_purchase_invoice_line(cli_ctx, *_):
    """Operations group: companies/purchaseInvoiceLines."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_purchase_invoice_lines


def cf_financial_company_purchase_invoice_line_item(cli_ctx, *_):
    """Operations group: companies/purchaseInvoiceLines/item."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_purchase_invoice_lines_item


def cf_financial_company_purchase_invoice(cli_ctx, *_):
    """Operations group: companies/purchaseInvoices."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_purchase_invoices


def cf_financial_company_purchase_invoice_purchase_invoice_line(cli_ctx, *_):
    """Operations group: purchaseInvoices/purchaseInvoiceLines."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_purchase_invoices_purchase_invoice_lines


def cf_financial_company_purchase_invoice_purchase_invoice_line_item(cli_ctx, *_):
    """Operations group: purchaseInvoices/purchaseInvoiceLines/item."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_purchase_invoices_purchase_invoice_lines_item


def cf_financial_company_purchase_invoice_vendor(cli_ctx, *_):
    """Operations group: purchaseInvoices/vendor."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_purchase_invoices_vendor


def cf_financial_company_sale_credit_memo_line(cli_ctx, *_):
    """Operations group: companies/salesCreditMemoLines."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_credit_memo_lines


def cf_financial_company_sale_credit_memo_line_item(cli_ctx, *_):
    """Operations group: companies/salesCreditMemoLines/item."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_credit_memo_lines_item


def cf_financial_company_sale_credit_memo(cli_ctx, *_):
    """Operations group: companies/salesCreditMemos."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_credit_memos


def cf_financial_company_sale_credit_memo_customer(cli_ctx, *_):
    """Operations group: salesCreditMemos/customer."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_credit_memos_customer


def cf_financial_company_sale_credit_memo_sale_credit_memo_line(cli_ctx, *_):
    """Operations group: salesCreditMemos/salesCreditMemoLines."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_credit_memos_sales_credit_memo_lines


def cf_financial_company_sale_credit_memo_sale_credit_memo_line_item(cli_ctx, *_):
    """Operations group: salesCreditMemos/salesCreditMemoLines/item."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_credit_memos_sales_credit_memo_lines_item


def cf_financial_company_sale_invoice_line(cli_ctx, *_):
    """Operations group: companies/salesInvoiceLines."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_invoice_lines


def cf_financial_company_sale_invoice_line_item(cli_ctx, *_):
    """Operations group: companies/salesInvoiceLines/item."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_invoice_lines_item


def cf_financial_company_sale_invoice(cli_ctx, *_):
    """Operations group: companies/salesInvoices."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_invoices


def cf_financial_company_sale_invoice_customer(cli_ctx, *_):
    """Operations group: salesInvoices/customer."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_invoices_customer


def cf_financial_company_sale_invoice_sale_invoice_line(cli_ctx, *_):
    """Operations group: salesInvoices/salesInvoiceLines."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_invoices_sales_invoice_lines


def cf_financial_company_sale_invoice_sale_invoice_line_item(cli_ctx, *_):
    """Operations group: salesInvoices/salesInvoiceLines/item."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_invoices_sales_invoice_lines_item
def cf_financial_company_sale_order_line(cli_ctx, *_):
    """Operations group: companies/salesOrderLines."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_order_lines


def cf_financial_company_sale_order_line_item(cli_ctx, *_):
    """Operations group: companies/salesOrderLines/item."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_order_lines_item


def cf_financial_company_sale_order(cli_ctx, *_):
    """Operations group: companies/salesOrders."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_orders


def cf_financial_company_sale_order_customer(cli_ctx, *_):
    """Operations group: salesOrders/customer."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_orders_customer


def cf_financial_company_sale_order_sale_order_line(cli_ctx, *_):
    """Operations group: salesOrders/salesOrderLines."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_orders_sales_order_lines


def cf_financial_company_sale_order_sale_order_line_item(cli_ctx, *_):
    """Operations group: salesOrders/salesOrderLines/item."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_orders_sales_order_lines_item


def cf_financial_company_sale_quote_line(cli_ctx, *_):
    """Operations group: companies/salesQuoteLines."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_quote_lines


def cf_financial_company_sale_quote_line_item(cli_ctx, *_):
    """Operations group: companies/salesQuoteLines/item."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_quote_lines_item


def cf_financial_company_sale_quote(cli_ctx, *_):
    """Operations group: companies/salesQuotes."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_quotes


def cf_financial_company_sale_quote_customer(cli_ctx, *_):
    """Operations group: salesQuotes/customer."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_quotes_customer


def cf_financial_company_sale_quote_sale_quote_line(cli_ctx, *_):
    """Operations group: salesQuotes/salesQuoteLines."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_quotes_sales_quote_lines


def cf_financial_company_sale_quote_sale_quote_line_item(cli_ctx, *_):
    """Operations group: salesQuotes/salesQuoteLines/item."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_sales_quotes_sales_quote_lines_item


def cf_financial_company_vendor(cli_ctx, *_):
    """Operations group: companies/vendors."""
    client = cf_financials_beta_cl(cli_ctx)
    return client.financials_companies_vendors
| 38.89372
| 116
| 0.816917
| 1,102
| 8,051
| 5.285844
| 0.088929
| 0.098884
| 0.131845
| 0.148326
| 0.88309
| 0.874678
| 0.842403
| 0.828498
| 0.79588
| 0.704206
| 0
| 0
| 0.113278
| 8,051
| 206
| 117
| 39.082524
| 0.815941
| 0.054527
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.475248
| false
| 0
| 0.019802
| 0.465347
| 0.970297
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
eb07a897011687b914773168cc24cc83a330acae
| 134
|
py
|
Python
|
src/sage/misc/interpreter.py
|
switzel/sage
|
7eb8510dacf61b691664cd8f1d2e75e5d473e5a0
|
[
"BSL-1.0"
] | null | null | null |
src/sage/misc/interpreter.py
|
switzel/sage
|
7eb8510dacf61b691664cd8f1d2e75e5d473e5a0
|
[
"BSL-1.0"
] | null | null | null |
src/sage/misc/interpreter.py
|
switzel/sage
|
7eb8510dacf61b691664cd8f1d2e75e5d473e5a0
|
[
"BSL-1.0"
] | 1
|
2020-07-24T12:20:37.000Z
|
2020-07-24T12:20:37.000Z
|
# Backwards-compatibility shim: the implementation moved to
# sage.repl.interpreter.  Re-export ``do_preparse`` lazily, emitting a
# deprecation warning (trac ticket 17460) when it is first used.
from sage.misc.lazy_import import lazy_import
lazy_import('sage.repl.interpreter', '_do_preparse', 'do_preparse', deprecation=17460)
| 33.5
| 86
| 0.813433
| 19
| 134
| 5.421053
| 0.578947
| 0.291262
| 0.31068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04
| 0.067164
| 134
| 3
| 87
| 44.666667
| 0.784
| 0
| 0
| 0
| 0
| 0
| 0.328358
| 0.156716
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
eb2a49810c7d21473cacc41845e3ba920ab2c64d
| 59
|
py
|
Python
|
safety_gym/envs/__init__.py
|
krishpop/safety-gym
|
7e055dae7c98b52c2fa521d878d9091c9bbc72d5
|
[
"MIT"
] | null | null | null |
safety_gym/envs/__init__.py
|
krishpop/safety-gym
|
7e055dae7c98b52c2fa521d878d9091c9bbc72d5
|
[
"MIT"
] | null | null | null |
safety_gym/envs/__init__.py
|
krishpop/safety-gym
|
7e055dae7c98b52c2fa521d878d9091c9bbc72d5
|
[
"MIT"
] | null | null | null |
import safety_gym.envs.suite
import safety_gym.envs.custom
| 19.666667
| 29
| 0.864407
| 10
| 59
| 4.9
| 0.6
| 0.489796
| 0.612245
| 0.77551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067797
| 59
| 2
| 30
| 29.5
| 0.890909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
de18a1105a80a5c58236a6d64476e23622e96cf9
| 279
|
py
|
Python
|
2017/workshop/inputs.py
|
jkalmar/PythonDays
|
9af6ec6b8e919af0c789e7e01a7486536ef61ff3
|
[
"MIT"
] | null | null | null |
2017/workshop/inputs.py
|
jkalmar/PythonDays
|
9af6ec6b8e919af0c789e7e01a7486536ef61ff3
|
[
"MIT"
] | null | null | null |
2017/workshop/inputs.py
|
jkalmar/PythonDays
|
9af6ec6b8e919af0c789e7e01a7486536ef61ff3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
"""Sample encoded programs used as test inputs for the workshop interpreter."""
# NOTE(review): each input is a string of '0'/'1' characters; the encoding
# scheme is defined by the interpreter exercise, not visible here — confirm
# against the workshop material before changing any of these literals.
testInput1 = "010100100100"
testInput2 = """001000100010001000100010001000100110000000100010001000100010001000100010000100110111000000100100"""
testInput3 = "00100010001000100010001001100000100010100001001101110000001000100010001000100100"
| 39.857143
| 115
| 0.863799
| 10
| 279
| 24.1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.720755
| 0.050179
| 279
| 6
| 116
| 46.5
| 0.188679
| 0.103943
| 0
| 0
| 0
| 0
| 0.773663
| 0.72428
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
72200941fd4d803146894d59b2e1a93797ae765e
| 30,956
|
py
|
Python
|
NYUD/solver.py
|
pmorerio/admd
|
84e7e9ba180e7fd45c3548b1cbc6abcf1495b3f2
|
[
"MIT"
] | 12
|
2018-10-27T16:40:12.000Z
|
2020-09-13T00:44:21.000Z
|
NYUD/solver.py
|
ncgarcia/admd
|
9a821e7d1aac15b1c613bb8babdf0ccbff42ac78
|
[
"MIT"
] | 14
|
2019-10-09T13:00:52.000Z
|
2021-01-18T08:51:13.000Z
|
NYUD/solver.py
|
pmorerio/admd
|
84e7e9ba180e7fd45c3548b1cbc6abcf1495b3f2
|
[
"MIT"
] | 2
|
2018-10-19T12:29:48.000Z
|
2021-04-06T03:06:49.000Z
|
import tensorflow as tf
import tensorflow.contrib.slim as slim
import numpy as np
import os
import glob
from PIL import Image
import cPickle
class Solver(object):
    """Training/evaluation driver for RGB + depth (HHA) streams on NYUD.

    Wraps a model object exposing TF-1.x graph attributes (``images``,
    ``labels``, ``is_training``, ``loss``, ``accuracy``, ``train_op``,
    ``summary_op``, and for two-stream modes ``rgb_images``/``depth_images``
    plus GAN-style ``d_loss``/``g_loss``/``d_train_op``/``g_train_op``).
    NOTE(review): indentation was reconstructed from a flattened dump —
    the nesting of the periodic-logging vs. per-epoch blocks inside the
    training loops should be confirmed against the original repository.
    """

    def __init__(self, model, batch_size=4, train_iter=20000, train_iter_adv=200000, log_dir='logs',
                 model_save_path='model',
                 resnet50_ckpt='/data/models/resnet_50/',
                 image_dir='/data/datasets/NYUD_multimodal'):
        """Store configuration, build the session config and load the dataset.

        ``load_NYUD`` is invoked here, so constructing a Solver reads the
        whole dataset from ``image_dir`` into memory.
        """
        self.model = model
        self.batch_size = batch_size
        self.train_iter = train_iter
        self.train_iter_adv = train_iter_adv
        self.log_dir = os.path.join(log_dir, self.model.mode)
        self.model_save_path = model_save_path
        self.config = tf.ConfigProto()
        self.config.gpu_options.allow_growth = False
        self.config.allow_soft_placement = True
        self.resnet50_ckpt = resnet50_ckpt
        self.no_classes = model.no_classes
        self.image_dir = image_dir
        self.load_NYUD()

    def load_NYUD(self):
        """Load and preprocess all NYUD RGB and depth images into memory.

        Images are resized to 224x224, mean-subtracted per RGB channel, and
        shuffled with a fixed seed (231) so train/test order is reproducible.
        Results are stored in ``self.dataset[split]`` under keys
        'rgb_images', 'depth_images' and 'labels'.
        """
        print ('Loading NYUD dataset')
        RGB_MEAN = [123.68, 116.779, 103.939]
        splits = ['train', 'test']
        self.n_samples = {'train': 2186, 'test': 2401}
        self.dataset = {}
        for split in splits:
            print(split)
            rgb_classes = sorted(
                glob.glob(self.image_dir + '/' + split + '/images/*'))
            depth_classes = sorted(
                glob.glob(self.image_dir + '/' + split + '/depth/*'))
            assert len(rgb_classes) == len(depth_classes)
            assert len(rgb_classes) == self.model.no_classes
            rgb_images = np.zeros((self.n_samples[split], 224, 224, 3))
            depth_images = np.zeros((self.n_samples[split], 224, 224, 3))
            labels = np.zeros((self.n_samples[split], 1))
            # l = class index, c = running sample index
            l = 0
            c = 0
            for rgb_class_path, depth_class_path in zip(rgb_classes, depth_classes):
                rgb_images_list = sorted(glob.glob(rgb_class_path + '/*'))
                depth_images_list = sorted(glob.glob(depth_class_path + '/*'))
                assert len(rgb_images_list) == len(depth_images_list)
                # ~ #print str(l)+'/'+str(len(obj_categories))
                for rgb_image, depth_image in zip(rgb_images_list, depth_images_list):
                    img = Image.open(rgb_image)
                    img = img.resize((224, 224), Image.ANTIALIAS)
                    img = np.array(img, dtype=float)
                    img[:, :, 0] -= RGB_MEAN[0]
                    img[:, :, 1] -= RGB_MEAN[1]
                    img[:, :, 2] -= RGB_MEAN[2]
                    img = np.expand_dims(img, axis=0)
                    rgb_images[c] = img
                    # same processing for HHA-encoded images
                    img = Image.open(depth_image)
                    img = img.resize((224, 224), Image.ANTIALIAS)
                    img = np.array(img, dtype=float)
                    img[:, :, 0] -= RGB_MEAN[0]
                    img[:, :, 1] -= RGB_MEAN[1]
                    img[:, :, 2] -= RGB_MEAN[2]
                    img = np.expand_dims(img, axis=0)
                    depth_images[c] = img
                    labels[c] = l
                    c += 1
                l += 1
            # deterministic shuffle so all arrays stay aligned
            rnd_indices = np.arange(len(labels))
            np.random.seed(231)
            np.random.shuffle(rnd_indices)
            rgb_images = rgb_images[rnd_indices]
            depth_images = depth_images[rnd_indices]
            labels = labels[rnd_indices]
            self.dataset[split] = {
                'rgb_images': rgb_images, 'depth_images': depth_images, 'labels': np.squeeze(labels)}
        print('Loaded!')

    def eval_all_single_stream(self, split, modality, session):
        """Print accuracy of the single-stream model over a whole split.

        Accuracy is accumulated as a weighted sum over batches because the
        last batch produced by np.array_split can be smaller.
        """
        # any 'rgb*' modality shares the 'rgb_images' arrays
        modality = 'rgb' if 'rgb' in modality else modality
        # is_training = False for testing to be fair
        batches_per_epochs = int(
            self.dataset[split]['labels'].shape[0] / self.batch_size) + 1
        #print('Evaluating '+split+' accuracy')
        correct_preds = 0.
        for _im, _lab, in zip(np.array_split(self.dataset[split][modality + '_images'], batches_per_epochs),
                              np.array_split(
                                  self.dataset[split]['labels'], batches_per_epochs),
                              ):
            feed_dict = {self.model.images: _im, self.model.labels: _lab,
                         self.model.is_training: split == 'train'}
            _acc_ = session.run(fetches=self.model.accuracy,
                                feed_dict=feed_dict)
            # must be a weighted average since last split is smaller
            correct_preds += (_acc_ * len(_lab))
        print (modality + ' ' + split
               + ' acc [%.4f]' % (correct_preds / len(self.dataset[split]['labels'])))

    def train_single_stream(self, modality):
        """Train one modality stream from a renamed ResNet-50 checkpoint.

        Restores all resnet_v1_50 variables except logits/f_repr, logs loss
        and accuracy every 10 steps, and evaluates + checkpoints once per
        epoch (when the epoch-local batch index wraps to 0).
        """
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            print ('---Do not forget to rename the variables in the orginal resnet checkpoint if you are training for the first time')
            print ('---Run rename_ckpt.sh')
            print ('Loading pretrained ' + modality + '/resnet50...')
            variables_to_restore = slim.get_model_variables(
                scope=modality + '/resnet_v1_50')
            # get rid of logits
            variables_to_restore = [
                vv for vv in variables_to_restore if 'logits' not in vv.name]
            variables_to_restore = [
                vv for vv in variables_to_restore if 'f_repr' not in vv.name]
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, self.resnet50_ckpt +
                             'resnet_v1_50_' + modality + '.ckpt')
            print('Loaded!')
            saver = tf.train.Saver()
            summary_writer = tf.summary.FileWriter(
                logdir=self.log_dir, graph=tf.get_default_graph())
            # the +1 gives an additional smaller batch
            batches_per_epochs = int(
                self.dataset['train']['labels'].shape[0] / self.batch_size) + 1
            for step in range(self.train_iter):
                i = step % batches_per_epochs
                feed_dict = {self.model.images: self.dataset['train'][modality + '_images'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.labels: self.dataset['train']['labels'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.is_training: True}
                if step % 10 == 0:
                    summary, l, acc = sess.run(
                        [self.model.summary_op, self.model.loss, self.model.accuracy], feed_dict)
                    summary_writer.add_summary(summary, step)
                    print ('Step: [%d/%d] loss: [%.6f] acc: [%.6f] '
                           % (step, self.train_iter, l, acc))
                if i == 0:
                    # Eval on train
                    self.eval_all_single_stream(
                        'train', session=sess, modality=modality)
                    self.eval_all_single_stream(
                        'test', session=sess, modality=modality)
                    saver.save(sess, os.path.join(
                        self.model_save_path, modality))
                sess.run(self.model.train_op, feed_dict)

    def test_single_stream(self, modality):
        """Restore a trained single-stream checkpoint and evaluate on test."""
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            print ('Loading ' + modality + '/resnet50...')
            variables_to_restore = slim.get_model_variables(
                scope=modality + '/resnet_v1_50')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, modality))
            print('Loaded!')
            #~ self.eval_all_single_stream('train', session=sess, modality=modality)
            self.eval_all_single_stream(
                'test', session=sess, modality=modality)

    def eval_all_double_stream(self, split, session, noise=0.):
        """Print two-stream accuracy over a split.

        ``noise > 0`` multiplies the depth stream by Gaussian noise
        N(1, noise); ``noise < 0`` zeroes the depth stream entirely.
        """
        # is_training = False for testing to be fair
        batches_per_epochs = int(
            self.dataset[split]['labels'].shape[0] / self.batch_size) + 1
        #print('Evaluating '+split+' accuracy')
        correct_preds = 0.
        for rgb_im, depth_im, _lab, in zip(np.array_split(self.dataset[split]['rgb_images'], batches_per_epochs),
                                           np.array_split(
                                               self.dataset[split]['depth_images'], batches_per_epochs),
                                           np.array_split(
                                               self.dataset[split]['labels'], batches_per_epochs),
                                           ):
            if noise > 0.:
                depth_im = depth_im * \
                    np.random.normal(1, noise, size=depth_im.shape)
            elif noise < 0.:
                depth_im = np.zeros(shape=depth_im.shape)
            feed_dict = {self.model.rgb_images: rgb_im, self.model.depth_images: depth_im,
                         self.model.labels: _lab, self.model.is_training: split == 'train'}
            _acc_ = session.run(fetches=self.model.accuracy,
                                feed_dict=feed_dict)
            # must be a weighted average since last split is smaller
            correct_preds += (_acc_ * len(_lab))
        print (split + ' acc [%.4f]' %
               (correct_preds / len(self.dataset[split]['labels'])))

    def train_double_stream(self):
        """Train the joint RGB + depth model from the single-stream checkpoints."""
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            for modality in ['rgb', 'depth']:
                print ('Loading pretrained ' + modality + '/resnet50...')
                variables_to_restore = slim.get_model_variables(
                    scope=modality + '/resnet_v1_50')
                restorer = tf.train.Saver(variables_to_restore)
                restorer.restore(sess, os.path.join(
                    self.model_save_path, modality))
                print('Loaded!')
            saver = tf.train.Saver()
            summary_writer = tf.summary.FileWriter(
                logdir=self.log_dir, graph=tf.get_default_graph())
            # the +1 gives an additional smaller batch
            batches_per_epochs = int(
                self.dataset['train']['labels'].shape[0] / self.batch_size) + 1
            for step in range(self.train_iter):
                i = step % batches_per_epochs
                feed_dict = {self.model.rgb_images: self.dataset['train']['rgb_images'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.depth_images: self.dataset['train']['depth_images'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.labels: self.dataset['train']['labels'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.is_training: True}
                if step % 10 == 0:
                    summary, l, acc = sess.run(
                        [self.model.summary_op, self.model.loss, self.model.accuracy], feed_dict)
                    summary_writer.add_summary(summary, step)
                    print ('Step: [%d/%d] loss: [%.6f] acc: [%.6f] '
                           % (step, self.train_iter, l, acc))
                if i == 0:
                    # Eval on train
                    #~ self.eval_all_double_stream('train', session=sess)
                    self.eval_all_double_stream('test', session=sess)
                    # checkpoint name depends on whether moddrop is active
                    model_name = 'double_stream_moddrop' if 'moddrop' in self.model.mode else 'double_stream'
                    saver.save(sess, os.path.join(
                        self.model_save_path, model_name))
                sess.run(self.model.train_op, feed_dict)

    def test_ensemble_baseline(self):
        """Evaluate an ensemble of two RGB streams ('rgb' and 'rgb1') on test."""
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            for modality in ['rgb', 'rgb1']:
                print ('Loading pretrained ' + modality + '/resnet50...')
                variables_to_restore = slim.get_model_variables(
                    scope=modality + '/resnet_v1_50')
                restorer = tf.train.Saver(variables_to_restore)
                restorer.restore(sess, os.path.join(
                    self.model_save_path, modality))
                print('Loaded!')
            #~ self.eval_all_double_stream('train', session=sess)
            self.eval_all_double_stream('test', session=sess)

    def train_hallucination(self):
        """Adversarially train the hallucination stream.

        The hallucination stream is warm-started by copying the depth-stream
        weights variable-by-variable, then trained with alternating
        discriminator (d_train_op) and generator (g_train_op) updates.
        """
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            # load depth only
            print ('Loading pretrained double_stream model...')
            variables_to_restore = slim.get_model_variables(
                scope='depth/resnet_v1_50')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, 'double_stream'))
            print('Loaded!')
            # re-initialize the hall strem with values from the depth
            print ('Copying depth to hallucination...')
            hall_vars = [var for var in tf.global_variables()
                         if 'hall' in var.name]
            depth_vars = [var for var in tf.global_variables()
                          if 'depth' in var.name]
            for hvar, dvar in zip(hall_vars, depth_vars):
                # ~ print('assigning from \t'+dvar.name)
                # ~ print('to \t\t'+hvar.name)
                assign_op = hvar.assign(dvar)
                sess.run(assign_op)
            print('Done!')
            saver = tf.train.Saver()
            summary_writer = tf.summary.FileWriter(
                logdir=self.log_dir, graph=tf.get_default_graph())
            # the +1 gives an additional smaller batch
            batches_per_epochs = int(
                self.dataset['train']['labels'].shape[0] / self.batch_size) + 1
            print(batches_per_epochs, 'batches (i.e. iterations) per epoch')
            for step in range(self.train_iter_adv):
                i = step % batches_per_epochs
                feed_dict = {self.model.rgb_images: self.dataset['train']['rgb_images'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.depth_images: self.dataset['train']['depth_images'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.labels: self.dataset['train']['labels'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.is_training: True}
                if i == 0:
                    # ~ if step%500==0:
                    summary, d_loss, g_loss, logits_real, logits_fake = sess.run([self.model.summary_op, self.model.d_loss, self.model.g_loss,
                                                                                  self.model.logits_real, self.model.logits_fake], feed_dict)
                    summary_writer.add_summary(summary, step)
                    print ('Step: [%d/%d] d_loss: [%.6f] g_loss: [%.6f] \n\t\t logits_real: [%.6f] logits fake: [%.6f] '
                           % (step, self.train_iter_adv, d_loss, g_loss, np.mean(logits_real), np.mean(logits_fake)))
                    # ~ if i==0:
                    # Eval on train
                    #self.eval_all_double_stream('train', session=sess)
                    self.eval_all_double_stream('test', session=sess)
                    saver.save(sess, os.path.join(
                        self.model_save_path, 'hallucination'))
                sess.run(self.model.d_train_op, feed_dict)
                sess.run(self.model.g_train_op, feed_dict)

    def finetune_hallucination(self):
        """Fine-tune the hallucination stream together with the RGB stream."""
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            # load depth only
            print ('Loading pretrained hallucination model...')
            variables_to_restore = slim.get_model_variables(
                scope='hall/resnet_v1_50')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, 'hallucination'))
            print('Loaded!')
            print ('Loading rgb stream from pretrained double_stream model...')
            variables_to_restore = slim.get_model_variables(
                scope='rgb/resnet_v1_50')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, 'double_stream'))
            print('Loaded!')
            saver = tf.train.Saver()
            summary_writer = tf.summary.FileWriter(
                logdir=self.log_dir, graph=tf.get_default_graph())
            # the +1 gives an additional smaller batch
            batches_per_epochs = int(
                self.dataset['train']['labels'].shape[0] / self.batch_size) + 1
            for step in range(self.train_iter):
                i = step % batches_per_epochs
                feed_dict = {self.model.rgb_images: self.dataset['train']['rgb_images'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.labels: self.dataset['train']['labels'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.is_training: True}
                if step % 10 == 0:
                    summary, l, acc = sess.run(
                        [self.model.summary_op, self.model.loss, self.model.accuracy], feed_dict)
                    summary_writer.add_summary(summary, step)
                    print ('Step: [%d/%d] loss: [%.6f] acc: [%.6f] '
                           % (step, self.train_iter, l, acc))
                if i == 0:
                    # Eval on train
                    #~ self.eval_all_double_stream('train', session=sess)
                    self.eval_all_double_stream('test', session=sess)
                    saver.save(sess, os.path.join(
                        self.model_save_path, 'hallucination_finetuned'))
                sess.run(self.model.train_op, feed_dict)

    def test_hallucination(self):
        """Evaluate RGB + hallucination streams on the test split."""
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            # load depth only
            print ('Loading pretrained hallucination model...')
            variables_to_restore = slim.get_model_variables(
                scope='hall/resnet_v1_50')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, 'hallucination'))
            print('Loaded!')
            print ('Loading rgb stream from pretrained double_stream model...')
            variables_to_restore = slim.get_model_variables(
                scope='rgb/resnet_v1_50')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, 'double_stream'))
            print('Loaded!')
            self.eval_all_double_stream('test', session=sess)

    def test_moddrop(self, noise=0.):
        """Evaluate the moddrop double-stream model, optionally with depth noise."""
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            print ('Loading pretrained double_stream_moddrop model...')
            variables_to_restore = slim.get_model_variables(
                scope='rgb/resnet_v1_50')
            variables_to_restore += slim.get_model_variables(
                scope='depth/resnet_v1_50')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, 'double_stream_moddrop'))
            print('Loaded!')
            self.eval_all_double_stream('test', session=sess, noise=noise)

    def train_autoencoder(self):
        """Train the RGB->depth autoencoder on top of the frozen RGB stream."""
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            modality = 'rgb'
            print ('Loading pretrained ' + modality + '/resnet50...')
            variables_to_restore = slim.get_model_variables(
                scope=modality + '/resnet_v1_50')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, modality))
            print('Loaded!')
            saver = tf.train.Saver()
            summary_writer = tf.summary.FileWriter(
                logdir=self.log_dir, graph=tf.get_default_graph())
            # the +1 gives an additional smaller batch
            batches_per_epochs = int(
                self.dataset['train']['labels'].shape[0] / self.batch_size) + 1
            for step in range(self.train_iter):
                i = step % batches_per_epochs
                feed_dict = {self.model.rgb_images: self.dataset['train']['rgb_images'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.depth_images: self.dataset['train']['depth_images'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.is_training: True}
                # ~ if i==0:
                if step % 50 == 0:
                    summary, l = sess.run(
                        [self.model.summary_op, self.model.loss], feed_dict)
                    summary_writer.add_summary(summary, step)
                    print ('Step: [%d/%d] loss: [%.6f]'
                           % (step, self.train_iter, l))
                if i == 0:
                    saver.save(sess, os.path.join(
                        self.model_save_path, 'autoencoder'))
                sess.run(self.model.train_op, feed_dict)

    def test_autoencoder(self):
        """Run the autoencoder over the test split and save generated depth.

        Saves the reconstructed depth images to 'generated_depth_images.npy'
        for later use by test_double_stream_with_ae.
        """
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            print ('Loading pretrained autoencoder...')
            variables_to_restore = slim.get_model_variables()
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, 'autoencoder'))
            print('Loaded!')
            summary_writer = tf.summary.FileWriter(
                logdir=self.log_dir, graph=tf.get_default_graph())
            # the +1 gives an additional smaller batch
            batches_per_epochs = int(
                self.dataset['test']['labels'].shape[0] / self.batch_size) + 1
            # simply visualize on the test set
            # ... and save a copy of the generated images
            generated_depth_images = np.zeros(
                (self.n_samples['test'], 224, 224, 3))
            for step in range(batches_per_epochs):
                i = step
                feed_dict = {self.model.rgb_images: self.dataset['test']['rgb_images'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.depth_images: self.dataset['test']['depth_images'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.is_training: False}
                summary, l, gen_depth = sess.run(
                    [self.model.summary_op, self.model.loss, self.model.reconstructed_depth], feed_dict)
                generated_depth_images[i *
                                       self.batch_size:(i + 1) * self.batch_size] = gen_depth
                summary_writer.add_summary(summary, step)
                print ('Step: [%d/%d] loss: [%.6f]'
                       % (step, batches_per_epochs, l))
            np.save('generated_depth_images', generated_depth_images)

    def test_double_stream_with_ae(self):
        """Evaluate the double-stream model feeding autoencoder-generated depth."""
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            print ('Loading pretrained double_stream model...')
            variables_to_restore = slim.get_model_variables(
                scope='rgb/resnet_v1_50')
            variables_to_restore += slim.get_model_variables(
                scope='depth/resnet_v1_50')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, 'double_stream'))
            print('Loaded!')
            generated_depth_images = np.load('generated_depth_images.npy')
            # is_training = False for testing to be fair
            split = 'test'
            batches_per_epochs = int(
                self.dataset[split]['labels'].shape[0] / self.batch_size) + 1
            correct_preds = 0.
            for rgb_im, depth_im, _lab, in zip(np.array_split(self.dataset[split]['rgb_images'], batches_per_epochs),
                                               np.array_split(
                                                   generated_depth_images, batches_per_epochs),
                                               np.array_split(
                                                   self.dataset[split]['labels'], batches_per_epochs),
                                               ):
                feed_dict = {self.model.rgb_images: rgb_im, self.model.depth_images: depth_im,
                             self.model.labels: _lab, self.model.is_training: split == 'train'}
                _acc_ = sess.run(fetches=self.model.accuracy,
                                 feed_dict=feed_dict)
                # must be a weighted average since last split is smaller
                correct_preds += (_acc_ * len(_lab))
            print (split + ' acc [%.4f]' %
                   (correct_preds / len(self.dataset[split]['labels'])))

    def test_double_stream(self, noise=0.):
        """Evaluate the double-stream model, optionally corrupting the depth input."""
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            print ('Loading pretrained double_stream model...')
            variables_to_restore = slim.get_model_variables(
                scope='rgb/resnet_v1_50')
            variables_to_restore += slim.get_model_variables(
                scope='depth/resnet_v1_50')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, 'double_stream'))
            print('Loaded!')
            self.eval_all_double_stream('test', session=sess, noise=noise)

    def test_disc(self, noise=0.):
        """Evaluate double-stream model plus the pretrained discriminator.

        NOTE: the global initializer call is commented out, so all variables
        must come from the two restores below.
        """
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            #~ tf.global_variables_initializer().run()
            print ('Loading pretrained double_stream model...')
            variables_to_restore = slim.get_model_variables(
                scope='rgb/resnet_v1_50')
            variables_to_restore += slim.get_model_variables(
                scope='depth/resnet_v1_50')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, 'double_stream'))
            print('Loaded!')
            print ('Loading pretrained discriminator...')
            variables_to_restore = slim.get_model_variables(scope='disc')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, 'hallucination'))
            print('Loaded!')
            self.eval_all_double_stream('test', session=sess, noise=noise)

    def train_eccv(self):
        """Train the ECCV-variant hallucination model (non-adversarial loss)."""
        # build a graph
        self.model.build_model()
        with tf.Session(config=self.config) as sess:
            tf.global_variables_initializer().run()
            # load depth only
            print ('Loading pretrained double_stream model...')
            variables_to_restore = slim.get_model_variables(
                scope='depth/resnet_v1_50')
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, os.path.join(
                self.model_save_path, 'double_stream'))
            print('Loaded!')
            # re-initialize the hall stream with values from the depth
            print ('Copying depth to hallucination...')
            hall_vars = [var for var in tf.global_variables()
                         if 'hall' in var.name]
            depth_vars = [var for var in tf.global_variables()
                          if 'depth' in var.name]
            for hvar, dvar in zip(hall_vars, depth_vars):
                # ~ print('assigning from \t'+dvar.name)
                # ~ print('to \t\t'+hvar.name)
                assign_op = hvar.assign(dvar)
                sess.run(assign_op)
            print('Done!')
            saver = tf.train.Saver(max_to_keep=3)
            summary_writer = tf.summary.FileWriter(
                logdir=self.log_dir, graph=tf.get_default_graph())
            # the +1 gives an additional smaller batch
            batches_per_epochs = int(
                self.dataset['train']['labels'].shape[0] / self.batch_size) + 1
            print(batches_per_epochs, 'batches (i.e. iterations) per epoch')
            for step in range(self.train_iter_adv):
                i = step % batches_per_epochs
                feed_dict = {self.model.rgb_images: self.dataset['train']['rgb_images'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.depth_images: self.dataset['train']['depth_images'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.labels: self.dataset['train']['labels'][i * self.batch_size:(i + 1) * self.batch_size],
                             self.model.is_training: True}
                # if i == 0:
                if step % 500 == 0:
                    summary, l, acc = sess.run(
                        [self.model.summary_op, self.model.loss, self.model.accuracy], feed_dict)
                    summary_writer.add_summary(summary, step)
                    print ('Step: [%d/%d] loss: [%.6f] acc: [%.6f] '
                           % (step, self.train_iter, l, acc))
                if i == 0:
                    # Eval on train
                    print('step %d ' % step)
                    #self.eval_all_double_stream('train', session=sess)
                    self.eval_all_double_stream('test', session=sess)
                    saver.save(
                        sess, os.path.join(self.model_save_path, 'hallucination_eccv'), global_step=step)
                sess.run(self.model.train_op, feed_dict)
if __name__ == '__main__':
    # Placeholder entry point: this module is meant to be imported, not run.
    message = 'Empty'
    print(message)
| 44.669553
| 142
| 0.552462
| 3,587
| 30,956
| 4.536103
| 0.072484
| 0.060291
| 0.037551
| 0.02403
| 0.831233
| 0.822814
| 0.820478
| 0.814332
| 0.797923
| 0.793375
| 0
| 0.01336
| 0.335056
| 30,956
| 692
| 143
| 44.734104
| 0.777108
| 0.059181
| 0
| 0.705765
| 0
| 0.003976
| 0.094832
| 0.006435
| 0
| 0
| 0
| 0
| 0.005964
| 1
| 0.035785
| false
| 0
| 0.013917
| 0
| 0.05169
| 0.11332
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7234b10408bbcd4e4af64daf5f30a8f7bbea0e17
| 41
|
py
|
Python
|
xpp/plans/__init__.py
|
pcdshub/xpp
|
aaf46d7e4b1dffd4619ebc6c6194978938bb3fed
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
xpp/plans/__init__.py
|
pcdshub/xpp
|
aaf46d7e4b1dffd4619ebc6c6194978938bb3fed
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2020-04-07T20:31:58.000Z
|
2020-04-07T20:31:58.000Z
|
xpp/plans/__init__.py
|
pcdshub/xpp
|
aaf46d7e4b1dffd4619ebc6c6194978938bb3fed
|
[
"BSD-3-Clause-LBNL"
] | 2
|
2018-10-30T23:16:03.000Z
|
2020-04-07T20:04:51.000Z
|
from .serp_seq_scan import serp_seq_scan
| 20.5
| 40
| 0.878049
| 8
| 41
| 4
| 0.625
| 0.4375
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 1
| 41
| 41
| 0.864865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
723832a84907154b56e74f4436a5073ba7050fb6
| 56
|
py
|
Python
|
vectors/__init__.py
|
Cvaniak/vectors
|
887ecbe78175a23be8cc4fa8539a917df02d0bbb
|
[
"MIT"
] | 22
|
2015-03-18T08:30:12.000Z
|
2021-11-09T11:28:08.000Z
|
vectors/__init__.py
|
Cvaniak/vectors
|
887ecbe78175a23be8cc4fa8539a917df02d0bbb
|
[
"MIT"
] | 16
|
2016-07-04T08:04:51.000Z
|
2022-01-20T21:30:15.000Z
|
vectors/__init__.py
|
Cvaniak/vectors
|
887ecbe78175a23be8cc4fa8539a917df02d0bbb
|
[
"MIT"
] | 23
|
2015-11-17T08:39:05.000Z
|
2022-01-17T14:08:59.000Z
|
from .vectors import Point
from .vectors import Vector
| 18.666667
| 27
| 0.803571
| 8
| 56
| 5.625
| 0.625
| 0.488889
| 0.755556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160714
| 56
| 2
| 28
| 28
| 0.957447
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9d01a352d817b0ab838062bde423bc3f227324e3
| 4,392
|
py
|
Python
|
src/test/python/test_query/test_rsql.py
|
ettoreleandrotognoli/py-storage
|
d19a0ccbbb23aed818461775a9fc3cd7c5cc25fa
|
[
"Apache-2.0"
] | null | null | null |
src/test/python/test_query/test_rsql.py
|
ettoreleandrotognoli/py-storage
|
d19a0ccbbb23aed818461775a9fc3cd7c5cc25fa
|
[
"Apache-2.0"
] | 4
|
2021-04-13T20:43:11.000Z
|
2021-04-13T22:47:56.000Z
|
src/test/python/test_query/test_rsql.py
|
ettoreleandrotognoli/py-storage
|
d19a0ccbbb23aed818461775a9fc3cd7c5cc25fa
|
[
"Apache-2.0"
] | null | null | null |
from unittest import TestCase
from storage.query.rsql import parse
class RSQLTest(TestCase):
    """Behavioural checks for the RSQL ``parse`` function.

    ``parse`` turns an RSQL expression into a predicate; each test
    evaluates that predicate against an item (``None`` for
    item-independent constant expressions) and checks the boolean
    outcome.
    """

    def _holds(self, expression, item=None):
        # Parse the expression and apply the resulting predicate to item.
        return parse(expression)(item)

    def test_parse_true_boolean(self):
        self.assertTrue(self._holds('true==t'))

    def test_parse_false_boolean(self):
        self.assertTrue(self._holds('false==f'))

    def test_const_number_symbolic_eq_const_number_when_eq_should_be_true(self):
        self.assertTrue(self._holds('1==1'))

    def test_const_symbolic_eq_const_when_ne_should_be_false(self):
        self.assertFalse(self._holds('1==0'))

    def test_const_eq_const_when_eq_should_be_true(self):
        self.assertTrue(self._holds('1=eq=1'))

    def test_const_eq_const_when_ne_should_be_false(self):
        self.assertFalse(self._holds('1=eq=0'))

    def test_const_symbolic_ne_const_when_eq_should_be_false(self):
        self.assertFalse(self._holds('1!=1'))

    def test_const_symbolic_ne_const_when_ne_should_be_true(self):
        self.assertTrue(self._holds('1!=0'))

    def test_const_ne_const_when_eq_should_be_false(self):
        self.assertFalse(self._holds('1=ne=1'))

    def test_const_ne_const_when_ne_should_be_true(self):
        self.assertTrue(self._holds('1=ne=0'))

    def test_const_gt_const_when_gt_should_be_true(self):
        self.assertTrue(self._holds('1=gt=0'))

    def test_const_gt_const_when_eq_should_be_false(self):
        self.assertFalse(self._holds('1=gt=1'))

    def test_const_gt_const_when_lt_should_be_false(self):
        self.assertFalse(self._holds('1=gt=2'))

    def test_const_ge_const_when_gt_should_be_true(self):
        self.assertTrue(self._holds('1=ge=0'))

    def test_const_ge_const_when_eq_should_be_true(self):
        self.assertTrue(self._holds('1=ge=1'))

    def test_const_ge_const_when_lt_should_be_false(self):
        self.assertFalse(self._holds('1=ge=2'))

    def test_const_lt_const_when_lt_should_be_true(self):
        self.assertTrue(self._holds('0=lt=1'))

    def test_const_lt_const_when_eq_should_be_false(self):
        self.assertFalse(self._holds('1=lt=1'))

    def test_const_lt_const_when_gt_should_be_false(self):
        self.assertFalse(self._holds('2=lt=1'))

    def test_const_le_const_when_lt_should_be_true(self):
        self.assertTrue(self._holds('0=le=1'))

    def test_const_le_const_when_eq_should_be_true(self):
        self.assertTrue(self._holds('1=le=1'))

    def test_const_le_const_when_gt_should_be_false(self):
        self.assertFalse(self._holds('2=le=1'))

    def test_const_in_const_when_in_should_be_true(self):
        self.assertTrue(self._holds('"a"=in="abc"'))

    def test_const_contains_const_when_in_should_be_true(self):
        self.assertTrue(self._holds('"abc"=contains="b"'))

    def test_parse_id_eq_1(self):
        self.assertTrue(self._holds('id==1', {'id': 1}))

    def test_parse_id_eq_1_and_name_eq_literal_test(self):
        self.assertTrue(self._holds('id==1&name=="test"', {'id': 1, 'name': 'test'}))

    def test_parse_precedence(self):
        # '&' binds tighter than '|': left disjunct alone makes this true.
        self.assertTrue(self._holds('id==1|name=="test"&id!=1', {'id': 1, 'name': 'test'}))

    def test_parse_array(self):
        self.assertTrue(self._holds('children==[1,"a",3.1]', {'children': (1, 'a', 3.1)}))
| 29.877551
| 80
| 0.634107
| 620
| 4,392
| 4.16129
| 0.080645
| 0.075969
| 0.162791
| 0.206202
| 0.9
| 0.891085
| 0.863178
| 0.846124
| 0.807752
| 0.672093
| 0
| 0.016641
| 0.247495
| 4,392
| 146
| 81
| 30.082192
| 0.763994
| 0
| 0
| 0.469565
| 0
| 0
| 0.058288
| 0.010246
| 0
| 0
| 0
| 0
| 0.243478
| 1
| 0.243478
| false
| 0
| 0.017391
| 0
| 0.269565
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9d49b2ee9bdea5be89d6344bbdff552b0376e3fb
| 533
|
py
|
Python
|
src/api/errors.py
|
freundTech/Amulet-Map-Editor
|
4123c5bb36489dab6a5d4784e40597fcac435bcf
|
[
"MIT"
] | 1
|
2021-11-12T01:26:06.000Z
|
2021-11-12T01:26:06.000Z
|
src/api/errors.py
|
freundTech/Amulet-Map-Editor
|
4123c5bb36489dab6a5d4784e40597fcac435bcf
|
[
"MIT"
] | null | null | null |
src/api/errors.py
|
freundTech/Amulet-Map-Editor
|
4123c5bb36489dab6a5d4784e40597fcac435bcf
|
[
"MIT"
] | null | null | null |
class FormatError(Exception):
    """Base class for format-loader related errors.

    The original defined ``__init__(self, *args, **kwargs)`` that only
    forwarded to ``Exception.__init__`` -- that is redundant, since
    ``Exception`` already accepts arbitrary positional arguments, so the
    override is removed with no behaviour change.
    """
class FormatLoaderInvalidFormat(FormatError):
    """Loader error subtype -- name suggests an invalid format was given
    (confirm against call sites).

    The redundant forwarding ``__init__`` (which also bypassed
    ``FormatError`` by calling ``Exception.__init__`` directly) is
    removed; plain inheritance gives identical behaviour.
    """
class FormatLoaderMismatched(FormatError):
    """Loader error subtype -- name suggests a format mismatch
    (confirm against call sites).

    The redundant forwarding ``__init__`` (which also bypassed
    ``FormatError`` by calling ``Exception.__init__`` directly) is
    removed; plain inheritance gives identical behaviour.
    """
class FormatLoaderNoneMatched(FormatError):
    """Loader error subtype -- name suggests no loader matched
    (confirm against call sites).

    The redundant forwarding ``__init__`` (which also bypassed
    ``FormatError`` by calling ``Exception.__init__`` directly) is
    removed; plain inheritance gives identical behaviour.
    """
| 28.052632
| 49
| 0.688555
| 52
| 533
| 6.442308
| 0.211538
| 0.191045
| 0.286567
| 0.429851
| 0.716418
| 0.716418
| 0.716418
| 0.716418
| 0.716418
| 0.716418
| 0
| 0
| 0.168856
| 533
| 18
| 50
| 29.611111
| 0.756208
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
c20aafc99e98e35fe61f5259a83ca195cb40477e
| 595
|
py
|
Python
|
11_debugging/04_stack.py
|
varshashivhare/Mastering-Python
|
6101fa7855e57d0bbd194e936084bd64d9d38d76
|
[
"MIT"
] | 30
|
2016-10-28T18:14:15.000Z
|
2021-08-29T15:20:56.000Z
|
11_debugging/04_stack.py
|
varshashivhare/Mastering-Python
|
6101fa7855e57d0bbd194e936084bd64d9d38d76
|
[
"MIT"
] | null | null | null |
11_debugging/04_stack.py
|
varshashivhare/Mastering-Python
|
6101fa7855e57d0bbd194e936084bd64d9d38d76
|
[
"MIT"
] | 31
|
2016-09-10T22:47:12.000Z
|
2022-03-13T04:50:35.000Z
|
import traceback
class Spam(object):
    """Demo class: dumps the current call stack from inside a method."""

    def run(self):
        print('Before stack print')
        # Writes the full current call stack (default: stderr, no limit).
        traceback.print_stack()
        print('After stack print')
class Eggs(Spam):
    # Empty subclass; inherits run() from Spam unchanged.
    pass
if __name__ == '__main__':
    # Demo entry point: calling run() through the subclass triggers the
    # stack printout.
    eggs = Eggs()
    eggs.run()
##############################################################################
import traceback
class Spam(object):
    """Second demo variant: limits how much of the stack is printed."""

    def run(self):
        print('Before stack print')
        # limit=-1: on Python 3.5+ a negative limit prints the abs(limit)
        # innermost frames -- presumably just the last frame here; confirm
        # on the target interpreter version.
        traceback.print_stack(limit=-1)
        print('After stack print')
class Eggs(Spam):
    # Empty subclass; inherits run() from Spam unchanged.
    pass
if __name__ == '__main__':
    # Demo entry point for the limited-stack variant.
    eggs = Eggs()
    eggs.run()
| 14.875
| 78
| 0.517647
| 62
| 595
| 4.677419
| 0.306452
| 0.172414
| 0.137931
| 0.165517
| 0.97931
| 0.97931
| 0.97931
| 0.97931
| 0.97931
| 0.97931
| 0
| 0.002222
| 0.243697
| 595
| 39
| 79
| 15.25641
| 0.642222
| 0
| 0
| 0.909091
| 0
| 0
| 0.166344
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0.090909
| 0.090909
| 0
| 0.363636
| 0.272727
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
c24773ee79d7bcb590cfaf17610852ae350c0bd8
| 6,132
|
py
|
Python
|
python/TestAPI/SignUpTest.py
|
toilatester/sample-automation-frameworks-across-languages
|
4c1ceb3f8fff14ed838f94c92be7d92013c95d4a
|
[
"Apache-2.0"
] | 8
|
2020-12-11T06:57:12.000Z
|
2021-10-11T12:53:49.000Z
|
python/TestAPI/SignUpTest.py
|
toilatester/sample-automation-frameworks-across-languages
|
4c1ceb3f8fff14ed838f94c92be7d92013c95d4a
|
[
"Apache-2.0"
] | null | null | null |
python/TestAPI/SignUpTest.py
|
toilatester/sample-automation-frameworks-across-languages
|
4c1ceb3f8fff14ed838f94c92be7d92013c95d4a
|
[
"Apache-2.0"
] | 2
|
2021-04-06T08:14:35.000Z
|
2021-08-05T01:43:54.000Z
|
from API.SignUp import SignUp
from Core.Base.BaseTest import BaseTest
from Core.Assertions.Assertion import APIAssert
from Utils.DataGenerateUtils import DataGenerateUtils as DataGenerate
from Utils.JSONUtils import JSONUtils
class SignUpTest(BaseTest):
    """API tests for the sign-up endpoint.

    Positive case: valid data returns 200 with the submitted username and
    email echoed plus a JWT token. Negative cases: duplicate email and
    every combination of missing fields return 400 with a service error
    code in the response's "error" object.
    """

    # Service error codes read from the "error" object of the response.
    INVALID_PARAM = 13
    EXISTING_EMAIL = 12

    def __init__(self, *args, **kwargs):
        # NOTE(review): super(BaseTest, self) SKIPS BaseTest.__init__ and
        # invokes BaseTest's parent instead; if BaseTest performs its own
        # set-up this probably should be super(SignUpTest, self) / super().
        # Confirm intent before changing.
        super(BaseTest, self).__init__(*args, **kwargs)
        self.is_api_test = True
        self.__sign_up = SignUp()
        self.__data = DataGenerate()

    def tearDown(self):
        super().tearDown()
        # Record the last request payload (presumably for reporting or
        # account cleanup by the base class -- confirm in BaseTest).
        self.API_PAY_LOAD.append(self.__sign_up.post_data)

    def test_sign_up_successfully_with_valid_data(self):
        self.__sign_up.create_new_user_account(username=self.__data.create_name(),
                                               email=self.__data.create_email(),
                                               password=self.__data.create_password())
        APIAssert.should_run_api_successfully(
            self.assert_container(self.assertEqual, self.__sign_up.response_code, 200),
            self.assert_container(self.assertIn, self.__sign_up.post_data['username'], self.__sign_up.response_body),
            self.assert_container(self.assertIn, self.__sign_up.post_data['email'], self.__sign_up.response_body),
            self.assert_container(self.assertGreater,
                                  len(JSONUtils.get_value_json_string(self.__sign_up.response_body, "jwtToken")), 20)
        )

    def test_sign_up_unsuccessfully_with_existing_email(self):
        username = self.__data.create_name()
        email = self.__data.create_email()
        password = self.__data.create_password()
        self.__sign_up.create_new_user_account(username=username,
                                               email=email,
                                               password=password)
        # The first (successful) payload is recorded here because the
        # duplicate attempt below overwrites post_data before tearDown.
        self.API_PAY_LOAD.append(self.__sign_up.post_data)
        self.__sign_up.create_new_user_account(username=username,
                                               email=email,
                                               password=password)
        APIAssert.should_run_api_successfully(
            self.assert_container(self.assertEqual, self.__sign_up.response_code, 400),
            self.assert_container(self.assertEqual, self.EXISTING_EMAIL,
                                  JSONUtils.get_value_json_string(self.__sign_up.response_body, "error")['code'])
        )

    def test_sign_up_unsuccessfully_with_blank_username(self):
        # Username omitted entirely; note the password is a number here.
        self.__sign_up.create_new_user_account(email=self.__data.create_email(),
                                               password=self.__data.create_number())
        APIAssert.should_run_api_successfully(
            self.assert_container(self.assertEqual, self.__sign_up.response_code, 400),
            self.assert_container(self.assertEqual, self.INVALID_PARAM,
                                  JSONUtils.get_value_json_string(self.__sign_up.response_body, "error")['code'])
        )

    def test_sign_up_unsuccessfully_with_blank_email(self):
        self.__sign_up.create_new_user_account(username=self.__data.create_name(),
                                               password=self.__data.create_number())
        APIAssert.should_run_api_successfully(
            self.assert_container(self.assertEqual, self.__sign_up.response_code, 400),
            self.assert_container(self.assertEqual, self.INVALID_PARAM,
                                  JSONUtils.get_value_json_string(self.__sign_up.response_body, "error")['code'])
        )

    def test_sign_up_unsuccessfully_with_blank_password(self):
        self.__sign_up.create_new_user_account(username=self.__data.create_name(),
                                               email=self.__data.create_email())
        APIAssert.should_run_api_successfully(
            self.assert_container(self.assertEqual, self.__sign_up.response_code, 400),
            self.assert_container(self.assertEqual, self.INVALID_PARAM,
                                  JSONUtils.get_value_json_string(self.__sign_up.response_body, "error")['code'])
        )

    def test_sign_up_unsuccessfully_with_blank_username_email(self):
        self.__sign_up.create_new_user_account(password=self.__data.create_number())
        APIAssert.should_run_api_successfully(
            self.assert_container(self.assertEqual, self.__sign_up.response_code, 400),
            self.assert_container(self.assertEqual, self.INVALID_PARAM,
                                  JSONUtils.get_value_json_string(self.__sign_up.response_body, "error")['code'])
        )

    def test_sign_up_unsuccessfully_with_blank_username_password(self):
        self.__sign_up.create_new_user_account(email=self.__data.create_email())
        APIAssert.should_run_api_successfully(
            self.assert_container(self.assertEqual, self.__sign_up.response_code, 400),
            self.assert_container(self.assertEqual, self.INVALID_PARAM,
                                  JSONUtils.get_value_json_string(self.__sign_up.response_body, "error")['code'])
        )

    def test_sign_up_unsuccessfully_with_blank_email_password(self):
        self.__sign_up.create_new_user_account(username=self.__data.create_name())
        APIAssert.should_run_api_successfully(
            self.assert_container(self.assertEqual, self.__sign_up.response_code, 400),
            self.assert_container(self.assertEqual, self.INVALID_PARAM,
                                  JSONUtils.get_value_json_string(self.__sign_up.response_body, "error")['code'])
        )

    def test_sign_up_unsuccessfully_with_blank_all(self):
        self.__sign_up.create_new_user_account()
        APIAssert.should_run_api_successfully(
            self.assert_container(self.assertEqual, self.__sign_up.response_code, 400),
            self.assert_container(self.assertEqual, self.INVALID_PARAM,
                                  JSONUtils.get_value_json_string(self.__sign_up.response_body, "error")['code'])
        )
| 56.256881
| 118
| 0.648891
| 681
| 6,132
| 5.312775
| 0.108664
| 0.072968
| 0.096739
| 0.127142
| 0.863184
| 0.863184
| 0.854616
| 0.854616
| 0.845218
| 0.813709
| 0
| 0.00733
| 0.265819
| 6,132
| 108
| 119
| 56.777778
| 0.796313
| 0
| 0
| 0.473684
| 1
| 0
| 0.015438
| 0
| 0
| 0
| 0
| 0
| 0.315789
| 1
| 0.115789
| false
| 0.105263
| 0.052632
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
dfb6fd6d6704bd36e652d5d21ae2a080b9fe3c51
| 38,890
|
py
|
Python
|
tests/test_observation.py
|
glichtner/fhir.resources
|
94896d8f8a0b7dd69253762aab968f4fd6eb69a0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_observation.py
|
glichtner/fhir.resources
|
94896d8f8a0b7dd69253762aab968f4fd6eb69a0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_observation.py
|
glichtner/fhir.resources
|
94896d8f8a0b7dd69253762aab968f4fd6eb69a0
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/Observation
Release: R5
Version: 4.5.0
Build ID: 0d95498
Last updated: 2021-04-03T00:34:11.075+00:00
"""
from pydantic.validators import bytes_validator # noqa: F401
from fhir.resources import fhirtypes # noqa: F401
from fhir.resources import observation
def impl_observation_1(inst):
    """Assert the field values of observation-example-bmd (BMD, left femur)."""
    assert inst.bodySite.coding[0].code == "71341001:272741003=7771000"
    assert inst.bodySite.coding[0].system == "http://snomed.info/sct"
    assert inst.bodySite.text == "Left Femur"
    assert inst.code.coding[0].code == "24701-5"
    assert inst.code.coding[0].display == "Femur DXA Bone density"
    assert inst.code.coding[0].system == "http://loinc.org"
    assert inst.code.text == "BMD - Left Femur"
    assert inst.id == "bmd"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.performer[0].display == "Acme Imaging Diagnostics"
    assert (
        inst.performer[0].reference
        == "Organization/1832473e-2fe0-452d-abe9-3cdb9879522f"
    )
    assert inst.status == "final"
    assert inst.subject.reference == "Patient/pat2"
    assert inst.text.status == "generated"
    assert inst.valueQuantity.code == "g/cm-2"
    assert inst.valueQuantity.system == "http://unitsofmeasure.org"
    assert inst.valueQuantity.unit == "g/cm²"
    assert float(inst.valueQuantity.value) == float(0.887)
def test_observation_1(base_settings):
    """No. 1 tests collection for Observation.
    Test File: observation-example-bmd.json
    """
    source = base_settings["unittest_data_dir"] / "observation-example-bmd.json"
    parsed = observation.Observation.parse_file(
        source, content_type="application/json", encoding="utf-8"
    )
    assert "Observation" == parsed.resource_type
    impl_observation_1(parsed)
    # Round-trip: export to a plain dict and rebuild to prove losslessness.
    exported = parsed.dict()
    assert "Observation" == exported["resourceType"]
    rebuilt = observation.Observation(**exported)
    impl_observation_1(rebuilt)
def impl_observation_2(inst):
    """Assert the field values of observation-example-f004-erythrocyte."""
    assert inst.code.coding[0].code == "789-8"
    assert (
        inst.code.coding[0].display
        == "Erythrocytes [#/volume] in Blood by Automated count"
    )
    assert inst.code.coding[0].system == "http://loinc.org"
    assert inst.effectivePeriod.end == fhirtypes.DateTime.validate(
        "2013-04-05T10:30:10+01:00"
    )
    assert inst.effectivePeriod.start == fhirtypes.DateTime.validate(
        "2013-04-02T10:30:10+01:00"
    )
    assert inst.id == "f004"
    assert (
        inst.identifier[0].system
        == "http://www.bmc.nl/zorgportal/identifiers/observations"
    )
    assert inst.identifier[0].use == "official"
    assert inst.identifier[0].value == "6326"
    assert inst.interpretation[0].coding[0].code == "L"
    assert inst.interpretation[0].coding[0].display == "Low"
    assert inst.interpretation[0].coding[0].system == (
        "http://terminology.hl7.org/CodeSystem/v3-ObservationInterpre" "tation"
    )
    assert inst.issued == fhirtypes.Instant.validate("2013-04-03T15:30:10+01:00")
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.performer[0].display == "A. Langeveld"
    assert inst.performer[0].reference == "Practitioner/f005"
    assert inst.status == "final"
    assert inst.subject.display == "P. van de Heuvel"
    assert inst.subject.reference == "Patient/f001"
    assert inst.text.status == "generated"
    assert inst.valueQuantity.code == "10*12/L"
    assert inst.valueQuantity.system == "http://unitsofmeasure.org"
    assert inst.valueQuantity.unit == "10^12/L"
    assert float(inst.valueQuantity.value) == float(4.12)
def test_observation_2(base_settings):
    """No. 2 tests collection for Observation.
    Test File: observation-example-f004-erythrocyte.json
    """
    source = (
        base_settings["unittest_data_dir"] / "observation-example-f004-erythrocyte.json"
    )
    parsed = observation.Observation.parse_file(
        source, content_type="application/json", encoding="utf-8"
    )
    assert "Observation" == parsed.resource_type
    impl_observation_2(parsed)
    # Round-trip: export to a plain dict and rebuild to prove losslessness.
    exported = parsed.dict()
    assert "Observation" == exported["resourceType"]
    rebuilt = observation.Observation(**exported)
    impl_observation_2(rebuilt)
def impl_observation_3(inst):
    """Assert the field values of observation-example-eye-color."""
    assert inst.code.text == "eye color"
    assert inst.effectiveDateTime == fhirtypes.DateTime.validate("2016-05-18")
    assert inst.id == "eye-color"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.status == "final"
    assert inst.subject.reference == "Patient/example"
    assert inst.text.status == "generated"
    assert inst.valueString == "blue"
def test_observation_3(base_settings):
    """No. 3 tests collection for Observation.
    Test File: observation-example-eye-color.json
    """
    source = base_settings["unittest_data_dir"] / "observation-example-eye-color.json"
    parsed = observation.Observation.parse_file(
        source, content_type="application/json", encoding="utf-8"
    )
    assert "Observation" == parsed.resource_type
    impl_observation_3(parsed)
    # Round-trip: export to a plain dict and rebuild to prove losslessness.
    exported = parsed.dict()
    assert "Observation" == exported["resourceType"]
    rebuilt = observation.Observation(**exported)
    impl_observation_3(rebuilt)
def impl_observation_4(inst):
    """Assert the field values of observation-example-10minute-apgar-score.

    Checks the overall score plus the five Apgar components
    (color, heart rate, reflex irritability, muscle tone,
    respiratory effort) in order.
    """
    assert inst.category[0].coding[0].code == "survey"
    assert inst.category[0].coding[0].display == "Survey"
    assert (
        inst.category[0].coding[0].system
        == "http://terminology.hl7.org/CodeSystem/observation-category"
    )
    assert inst.category[0].text == "Survey"
    assert inst.code.coding[0].code == "9271-8"
    assert inst.code.coding[0].display == "10 minute Apgar Score"
    assert inst.code.coding[0].system == "http://loinc.org"
    assert inst.code.coding[1].code == "169922007"
    assert inst.code.coding[1].display == "Apgar at 10 minutes"
    assert inst.code.coding[1].system == "http://snomed.info/sct"
    assert inst.code.text == "10 minute Apgar Score"
    # Component 0: color score.
    assert inst.component[0].code.coding[0].code == "32401-2"
    assert inst.component[0].code.coding[0].display == "10 minute Apgar Color"
    assert inst.component[0].code.coding[0].system == "http://loinc.org"
    assert inst.component[0].code.coding[1].code == "249227004"
    assert inst.component[0].code.coding[1].display == "Apgar color score"
    assert inst.component[0].code.coding[1].system == "http://snomed.info/sct"
    assert inst.component[0].code.text == "Apgar color score"
    assert inst.component[0].valueCodeableConcept.coding[0].code == "LA6724-4"
    assert (
        inst.component[0].valueCodeableConcept.coding[0].display
        == "Good color all over"
    )
    assert (
        inst.component[0].valueCodeableConcept.coding[0].extension[0].url
        == "http://hl7.org/fhir/StructureDefinition/ordinalValue"
    )
    assert float(
        inst.component[0].valueCodeableConcept.coding[0].extension[0].valueDecimal
    ) == float(2)
    assert inst.component[0].valueCodeableConcept.coding[0].system == "http://loinc.org"
    assert inst.component[0].valueCodeableConcept.coding[1].code == "2"
    assert (
        inst.component[0].valueCodeableConcept.coding[1].system
        == "http://acme.ped/apgarcolor"
    )
    assert inst.component[0].valueCodeableConcept.text == "2. Good color all over"
    # Component 1: heart rate score.
    assert inst.component[1].code.coding[0].code == "32402-0"
    assert inst.component[1].code.coding[0].display == "10 minute Apgar Heart Rate"
    assert inst.component[1].code.coding[0].system == "http://loinc.org"
    assert inst.component[1].code.coding[1].code == "249223000"
    assert inst.component[1].code.coding[1].display == "Apgar heart rate score"
    assert inst.component[1].code.coding[1].system == "http://snomed.info/sct"
    assert inst.component[1].code.text == "Apgar respiratory effort score"
    assert inst.component[1].valueCodeableConcept.coding[0].code == "LA6718-6"
    assert (
        inst.component[1].valueCodeableConcept.coding[0].display
        == "At least 100 beats per minute"
    )
    assert (
        inst.component[1].valueCodeableConcept.coding[0].extension[0].url
        == "http://hl7.org/fhir/StructureDefinition/ordinalValue"
    )
    assert float(
        inst.component[1].valueCodeableConcept.coding[0].extension[0].valueDecimal
    ) == float(2)
    assert inst.component[1].valueCodeableConcept.coding[0].system == "http://loinc.org"
    assert inst.component[1].valueCodeableConcept.coding[1].code == "2"
    assert (
        inst.component[1].valueCodeableConcept.coding[1].system
        == "http://acme.ped/apgarheartrate"
    )
    assert (
        inst.component[1].valueCodeableConcept.text
        == "2. At least 100 beats per minute"
    )
    # Component 2: reflex irritability (response to stimulus) score.
    assert inst.component[2].code.coding[0].code == "32404-6"
    assert (
        inst.component[2].code.coding[0].display
        == "10 minute Apgar Reflex Irritability"
    )
    assert inst.component[2].code.coding[0].system == "http://loinc.org"
    assert inst.component[2].code.coding[1].code == "249226008"
    assert (
        inst.component[2].code.coding[1].display == "Apgar response to stimulus score"
    )
    assert inst.component[2].code.coding[1].system == "http://snomed.info/sct"
    assert inst.component[2].code.text == "Apgar response to stimulus score"
    assert inst.component[2].valueCodeableConcept.coding[0].code == "LA6721-0"
    assert (
        inst.component[2].valueCodeableConcept.coding[0].display
        == "Grimace and pulling away, cough, or sneeze during suctioning"
    )
    assert (
        inst.component[2].valueCodeableConcept.coding[0].extension[0].url
        == "http://hl7.org/fhir/StructureDefinition/ordinalValue"
    )
    assert float(
        inst.component[2].valueCodeableConcept.coding[0].extension[0].valueDecimal
    ) == float(2)
    assert inst.component[2].valueCodeableConcept.coding[0].system == "http://loinc.org"
    assert inst.component[2].valueCodeableConcept.coding[1].code == "2"
    assert (
        inst.component[2].valueCodeableConcept.coding[1].system
        == "http://acme.ped/apgarreflexirritability"
    )
    assert inst.component[2].valueCodeableConcept.text == (
        "2. Grimace and pulling away, cough, or sneeze during " "suctioning"
    )
    # Component 3: muscle tone score.
    assert inst.component[3].code.coding[0].code == "32403-8"
    assert inst.component[3].code.coding[0].display == "10 minute Apgar Muscle Tone"
    assert inst.component[3].code.coding[0].system == "http://loinc.org"
    assert inst.component[3].code.coding[1].code == "249225007"
    assert inst.component[3].code.coding[1].display == "Apgar muscle tone score"
    assert inst.component[3].code.coding[1].system == "http://snomed.info/sct"
    assert inst.component[3].code.text == "Apgar muscle tone score"
    assert inst.component[3].valueCodeableConcept.coding[0].code == "LA6715-2"
    assert inst.component[3].valueCodeableConcept.coding[0].display == "Active motion "
    assert (
        inst.component[3].valueCodeableConcept.coding[0].extension[0].url
        == "http://hl7.org/fhir/StructureDefinition/ordinalValue"
    )
    assert float(
        inst.component[3].valueCodeableConcept.coding[0].extension[0].valueDecimal
    ) == float(2)
    assert inst.component[3].valueCodeableConcept.coding[0].system == "http://loinc.org"
    assert inst.component[3].valueCodeableConcept.coding[1].code == "2"
    assert (
        inst.component[3].valueCodeableConcept.coding[1].system
        == "http://acme.ped/apgarmuscletone"
    )
    assert inst.component[3].valueCodeableConcept.text == "2. Active motion"
    # Component 4: respiratory effort score.
    assert inst.component[4].code.coding[0].code == "32405-3"
    assert (
        inst.component[4].code.coding[0].display == "10 minute Apgar Respiratory effort"
    )
    assert inst.component[4].code.coding[0].system == "http://loinc.org"
    assert inst.component[4].code.coding[1].code == "249224006"
    assert inst.component[4].code.coding[1].display == "Apgar respiratory effort score"
    assert inst.component[4].code.coding[1].system == "http://snomed.info/sct"
    assert inst.component[4].code.text == "Apgar respiratory effort score"
    assert inst.component[4].valueCodeableConcept.coding[0].code == "LA6727-7"
    assert (
        inst.component[4].valueCodeableConcept.coding[0].display
        == "Good, strong cry; normal rate and effort of breathing "
    )
    assert (
        inst.component[4].valueCodeableConcept.coding[0].extension[0].url
        == "http://hl7.org/fhir/StructureDefinition/ordinalValue"
    )
    assert float(
        inst.component[4].valueCodeableConcept.coding[0].extension[0].valueDecimal
    ) == float(2)
    assert inst.component[4].valueCodeableConcept.coding[0].system == "http://loinc.org"
    assert inst.component[4].valueCodeableConcept.coding[1].code == "2"
    assert (
        inst.component[4].valueCodeableConcept.coding[1].system
        == "http://acme.ped/apgarrespiratoryeffort"
    )
    assert (
        inst.component[4].valueCodeableConcept.text
        == "2. Good, strong cry; normal rate and effort of breathing"
    )
    assert inst.contained[0].id == "newborn"
    assert inst.effectiveDateTime == fhirtypes.DateTime.validate("2016-05-18T22:33:22Z")
    assert inst.id == "10minute-apgar-score"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.performer[0].reference == "Practitioner/example"
    assert inst.status == "final"
    assert inst.subject.reference == "#newborn"
    assert inst.text.status == "generated"
    assert inst.valueQuantity.code == "{score}"
    assert inst.valueQuantity.system == "http://unitsofmeasure.org"
    assert float(inst.valueQuantity.value) == float(10)
def test_observation_4(base_settings):
    """No. 4 tests collection for Observation.
    Test File: observation-example-10minute-apgar-score.json
    """
    source = (
        base_settings["unittest_data_dir"]
        / "observation-example-10minute-apgar-score.json"
    )
    parsed = observation.Observation.parse_file(
        source, content_type="application/json", encoding="utf-8"
    )
    assert "Observation" == parsed.resource_type
    impl_observation_4(parsed)
    # Round-trip: export to a plain dict and rebuild to prove losslessness.
    exported = parsed.dict()
    assert "Observation" == exported["resourceType"]
    rebuilt = observation.Observation(**exported)
    impl_observation_4(rebuilt)
def impl_observation_5(inst):
    """Assert the field values of observation-example-satO2 (O2 saturation)."""
    assert inst.category[0].coding[0].code == "vital-signs"
    assert inst.category[0].coding[0].display == "Vital Signs"
    assert (
        inst.category[0].coding[0].system
        == "http://terminology.hl7.org/CodeSystem/observation-category"
    )
    assert inst.category[0].text == "Vital Signs"
    assert inst.code.coding[0].code == "2708-6"
    assert inst.code.coding[0].display == "Oxygen saturation in Arterial blood"
    assert inst.code.coding[0].system == "http://loinc.org"
    assert inst.code.coding[1].code == "59408-5"
    assert (
        inst.code.coding[1].display
        == "Oxygen saturation in Arterial blood by Pulse oximetry"
    )
    assert inst.code.coding[1].system == "http://loinc.org"
    assert inst.code.coding[2].code == "150456"
    assert inst.code.coding[2].display == "MDC_PULS_OXIM_SAT_O2"
    assert inst.code.coding[2].system == "urn:iso:std:iso:11073:10101"
    assert inst.device.reference == "DeviceMetric/example"
    assert inst.effectiveDateTime == fhirtypes.DateTime.validate(
        "2014-12-05T09:30:10+01:00"
    )
    assert inst.id == "satO2"
    assert inst.identifier[0].system == "http://goodcare.org/observation/id"
    assert inst.identifier[0].value == "o1223435-10"
    assert inst.interpretation[0].coding[0].code == "N"
    assert inst.interpretation[0].coding[0].display == "Normal"
    assert inst.interpretation[0].coding[0].system == (
        "http://terminology.hl7.org/CodeSystem/v3-ObservationInterpre" "tation"
    )
    assert inst.interpretation[0].text == "Normal (applies to non-numeric results)"
    assert inst.meta.profile[0] == "http://hl7.org/fhir/StructureDefinition/vitalsigns"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.partOf[0].reference == "Procedure/ob"
    # Reference range 90-99 %.
    assert inst.referenceRange[0].high.code == "%"
    assert inst.referenceRange[0].high.system == "http://unitsofmeasure.org"
    assert inst.referenceRange[0].high.unit == "%"
    assert float(inst.referenceRange[0].high.value) == float(99)
    assert inst.referenceRange[0].low.code == "%"
    assert inst.referenceRange[0].low.system == "http://unitsofmeasure.org"
    assert inst.referenceRange[0].low.unit == "%"
    assert float(inst.referenceRange[0].low.value) == float(90)
    assert inst.status == "final"
    assert inst.subject.reference == "Patient/example"
    assert inst.text.status == "generated"
    assert inst.valueQuantity.code == "%"
    assert inst.valueQuantity.system == "http://unitsofmeasure.org"
    assert inst.valueQuantity.unit == "%"
    assert float(inst.valueQuantity.value) == float(95)
def test_observation_5(base_settings):
    """No. 5 tests collection for Observation.
    Test File: observation-example-satO2.json
    """
    json_path = base_settings["unittest_data_dir"] / "observation-example-satO2.json"
    parsed = observation.Observation.parse_file(
        json_path, content_type="application/json", encoding="utf-8"
    )
    assert parsed.resource_type == "Observation"
    impl_observation_5(parsed)
    # Round-trip: serialize the parsed resource and rebuild it from the dict,
    # then re-run the same field assertions on the rebuilt instance.
    serialized = parsed.dict()
    assert serialized["resourceType"] == "Observation"
    rebuilt = observation.Observation(**serialized)
    impl_observation_5(rebuilt)
def impl_observation_6(inst):
    """Assert every expected field of fixture observation-example.json
    (body-weight vital sign, id "example")."""
    assert inst.category[0].coding[0].code == "vital-signs"
    assert inst.category[0].coding[0].display == "Vital Signs"
    assert (
        inst.category[0].coding[0].system
        == "http://terminology.hl7.org/CodeSystem/observation-category"
    )
    # The same concept coded in LOINC (twice), SNOMED CT, and a local system.
    assert inst.code.coding[0].code == "29463-7"
    assert inst.code.coding[0].display == "Body Weight"
    assert inst.code.coding[0].system == "http://loinc.org"
    assert inst.code.coding[1].code == "3141-9"
    assert inst.code.coding[1].display == "Body weight Measured"
    assert inst.code.coding[1].system == "http://loinc.org"
    assert inst.code.coding[2].code == "27113001"
    assert inst.code.coding[2].display == "Body weight"
    assert inst.code.coding[2].system == "http://snomed.info/sct"
    assert inst.code.coding[3].code == "body-weight"
    assert inst.code.coding[3].display == "Body Weight"
    assert inst.code.coding[3].system == "http://acme.org/devices/clinical-codes"
    assert inst.effectiveDateTime == fhirtypes.DateTime.validate("2016-03-28")
    assert inst.encounter.reference == "Encounter/example"
    assert inst.id == "example"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.status == "final"
    assert inst.subject.reference == "Patient/example"
    assert inst.text.status == "generated"
    assert inst.valueQuantity.code == "[lb_av]"
    assert inst.valueQuantity.system == "http://unitsofmeasure.org"
    assert inst.valueQuantity.unit == "lbs"
    assert float(inst.valueQuantity.value) == float(185)
def test_observation_6(base_settings):
    """No. 6 tests collection for Observation.
    Test File: observation-example.json
    """
    json_path = base_settings["unittest_data_dir"] / "observation-example.json"
    parsed = observation.Observation.parse_file(
        json_path, content_type="application/json", encoding="utf-8"
    )
    assert parsed.resource_type == "Observation"
    impl_observation_6(parsed)
    # Round-trip: serialize the parsed resource and rebuild it from the dict,
    # then re-run the same field assertions on the rebuilt instance.
    serialized = parsed.dict()
    assert serialized["resourceType"] == "Observation"
    rebuilt = observation.Observation(**serialized)
    impl_observation_6(rebuilt)
def impl_observation_7(inst):
    """Assert every expected field of fixture
    observation-example-bloodgroup.json (ABO blood group, id "bloodgroup")."""
    assert inst.category[0].coding[0].code == "laboratory"
    assert inst.category[0].coding[0].display == "Laboratory"
    assert (
        inst.category[0].coding[0].system
        == "http://terminology.hl7.org/CodeSystem/observation-category"
    )
    assert inst.category[0].text == "Laboratory"
    assert inst.code.coding[0].code == "883-9"
    assert inst.code.coding[0].display == "ABO group [Type] in Blood"
    assert inst.code.coding[0].system == "http://loinc.org"
    assert inst.code.text == "Blood Group"
    assert inst.effectiveDateTime == fhirtypes.DateTime.validate(
        "2018-03-11T16:07:54+00:00"
    )
    assert inst.id == "bloodgroup"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.status == "final"
    assert inst.subject.reference == "Patient/infant"
    assert inst.text.status == "generated"
    # Result is a coded concept (SNOMED CT), not a quantity.
    assert inst.valueCodeableConcept.coding[0].code == "112144000"
    assert inst.valueCodeableConcept.coding[0].display == "Blood group A (finding)"
    assert inst.valueCodeableConcept.coding[0].system == "http://snomed.info/sct"
    assert inst.valueCodeableConcept.text == "A"
def test_observation_7(base_settings):
    """No. 7 tests collection for Observation.
    Test File: observation-example-bloodgroup.json
    """
    json_path = (
        base_settings["unittest_data_dir"] / "observation-example-bloodgroup.json"
    )
    parsed = observation.Observation.parse_file(
        json_path, content_type="application/json", encoding="utf-8"
    )
    assert parsed.resource_type == "Observation"
    impl_observation_7(parsed)
    # Round-trip: serialize the parsed resource and rebuild it from the dict,
    # then re-run the same field assertions on the rebuilt instance.
    serialized = parsed.dict()
    assert serialized["resourceType"] == "Observation"
    rebuilt = observation.Observation(**serialized)
    impl_observation_7(rebuilt)
def impl_observation_8(inst):
    """Assert every expected field of fixture
    observation-example-5minute-apgar-score.json (id "5minute-apgar-score").

    The Apgar score observation carries five components (color, heart rate,
    reflex irritability, muscle tone, respiratory effort), each coded in
    LOINC + SNOMED CT with an ordinal-value extension of 2, summing to a
    total score of 10 in valueQuantity.
    """
    assert inst.category[0].coding[0].code == "survey"
    assert inst.category[0].coding[0].display == "Survey"
    assert (
        inst.category[0].coding[0].system
        == "http://terminology.hl7.org/CodeSystem/observation-category"
    )
    assert inst.category[0].text == "Survey"
    assert inst.code.coding[0].code == "9274-2"
    assert inst.code.coding[0].display == "5 minute Apgar Score"
    assert inst.code.coding[0].system == "http://loinc.org"
    assert inst.code.coding[1].code == "169909004"
    assert inst.code.coding[1].display == "Apgar at 5 minutes"
    assert inst.code.coding[1].system == "http://snomed.info/sct"
    assert inst.code.text == "5 minute Apgar Score"
    # component[0]: Apgar color score
    assert inst.component[0].code.coding[0].code == "32411-1"
    assert inst.component[0].code.coding[0].display == "5 minute Apgar Color"
    assert inst.component[0].code.coding[0].system == "http://loinc.org"
    assert inst.component[0].code.coding[1].code == "249227004"
    assert inst.component[0].code.coding[1].display == "Apgar color score"
    assert inst.component[0].code.coding[1].system == "http://snomed.info/sct"
    assert inst.component[0].code.text == "Apgar color score"
    assert inst.component[0].valueCodeableConcept.coding[0].code == "LA6724-4"
    assert (
        inst.component[0].valueCodeableConcept.coding[0].display
        == "Good color all over"
    )
    assert (
        inst.component[0].valueCodeableConcept.coding[0].extension[0].url
        == "http://hl7.org/fhir/StructureDefinition/ordinalValue"
    )
    assert float(
        inst.component[0].valueCodeableConcept.coding[0].extension[0].valueDecimal
    ) == float(2)
    assert inst.component[0].valueCodeableConcept.coding[0].system == "http://loinc.org"
    assert inst.component[0].valueCodeableConcept.coding[1].code == "2"
    assert (
        inst.component[0].valueCodeableConcept.coding[1].system
        == "http://acme.ped/apgarcolor"
    )
    assert inst.component[0].valueCodeableConcept.text == "2. Good color all over"
    # component[1]: Apgar heart rate score
    assert inst.component[1].code.coding[0].code == "32412-9"
    assert inst.component[1].code.coding[0].display == "5 minute Apgar Heart Rate"
    assert inst.component[1].code.coding[0].system == "http://loinc.org"
    assert inst.component[1].code.coding[1].code == "249223000"
    assert inst.component[1].code.coding[1].display == "Apgar heart rate score"
    assert inst.component[1].code.coding[1].system == "http://snomed.info/sct"
    # NOTE(review): fixture text says "respiratory effort" for the heart-rate
    # component — this mirrors the upstream example file as-is.
    assert inst.component[1].code.text == "Apgar respiratory effort score"
    assert inst.component[1].valueCodeableConcept.coding[0].code == "LA6718-6"
    assert (
        inst.component[1].valueCodeableConcept.coding[0].display
        == "At least 100 beats per minute"
    )
    assert (
        inst.component[1].valueCodeableConcept.coding[0].extension[0].url
        == "http://hl7.org/fhir/StructureDefinition/ordinalValue"
    )
    assert float(
        inst.component[1].valueCodeableConcept.coding[0].extension[0].valueDecimal
    ) == float(2)
    assert inst.component[1].valueCodeableConcept.coding[0].system == "http://loinc.org"
    assert inst.component[1].valueCodeableConcept.coding[1].code == "2"
    assert (
        inst.component[1].valueCodeableConcept.coding[1].system
        == "http://acme.ped/apgarheartrate"
    )
    assert (
        inst.component[1].valueCodeableConcept.text
        == "2. At least 100 beats per minute"
    )
    # component[2]: Apgar reflex irritability (response to stimulus) score
    assert inst.component[2].code.coding[0].code == "32414-5"
    assert (
        inst.component[2].code.coding[0].display == "5 minute Apgar Reflex Irritability"
    )
    assert inst.component[2].code.coding[0].system == "http://loinc.org"
    assert inst.component[2].code.coding[1].code == "249226008"
    assert (
        inst.component[2].code.coding[1].display == "Apgar response to stimulus score"
    )
    assert inst.component[2].code.coding[1].system == "http://snomed.info/sct"
    assert inst.component[2].code.text == "Apgar response to stimulus score"
    assert inst.component[2].valueCodeableConcept.coding[0].code == "LA6721-0"
    assert (
        inst.component[2].valueCodeableConcept.coding[0].display
        == "Grimace and pulling away, cough, or sneeze during suctioning"
    )
    assert (
        inst.component[2].valueCodeableConcept.coding[0].extension[0].url
        == "http://hl7.org/fhir/StructureDefinition/ordinalValue"
    )
    assert float(
        inst.component[2].valueCodeableConcept.coding[0].extension[0].valueDecimal
    ) == float(2)
    assert inst.component[2].valueCodeableConcept.coding[0].system == "http://loinc.org"
    assert inst.component[2].valueCodeableConcept.coding[1].code == "2"
    assert (
        inst.component[2].valueCodeableConcept.coding[1].system
        == "http://acme.ped/apgarreflexirritability"
    )
    assert inst.component[2].valueCodeableConcept.text == (
        "2. Grimace and pulling away, cough, or sneeze during " "suctioning"
    )
    # component[3]: Apgar muscle tone score
    assert inst.component[3].code.coding[0].code == "32413-7"
    assert inst.component[3].code.coding[0].display == "5 minute Apgar Muscle Tone"
    assert inst.component[3].code.coding[0].system == "http://loinc.org"
    assert inst.component[3].code.coding[1].code == "249225007"
    assert inst.component[3].code.coding[1].display == "Apgar muscle tone score"
    assert inst.component[3].code.coding[1].system == "http://snomed.info/sct"
    assert inst.component[3].code.text == "Apgar muscle tone score"
    assert inst.component[3].valueCodeableConcept.coding[0].code == "LA6715-2"
    assert inst.component[3].valueCodeableConcept.coding[0].display == "Active motion "
    assert (
        inst.component[3].valueCodeableConcept.coding[0].extension[0].url
        == "http://hl7.org/fhir/StructureDefinition/ordinalValue"
    )
    assert float(
        inst.component[3].valueCodeableConcept.coding[0].extension[0].valueDecimal
    ) == float(2)
    assert inst.component[3].valueCodeableConcept.coding[0].system == "http://loinc.org"
    assert inst.component[3].valueCodeableConcept.coding[1].code == "2"
    assert (
        inst.component[3].valueCodeableConcept.coding[1].system
        == "http://acme.ped/apgarmuscletone"
    )
    assert inst.component[3].valueCodeableConcept.text == "2. Active motion"
    # component[4]: Apgar respiratory effort score
    assert inst.component[4].code.coding[0].code == "32415-2"
    assert (
        inst.component[4].code.coding[0].display == "5 minute Apgar Respiratory effort"
    )
    assert inst.component[4].code.coding[0].system == "http://loinc.org"
    assert inst.component[4].code.coding[1].code == "249224006"
    assert inst.component[4].code.coding[1].display == "Apgar respiratory effort score"
    assert inst.component[4].code.coding[1].system == "http://snomed.info/sct"
    assert inst.component[4].code.text == "Apgar respiratory effort score"
    assert inst.component[4].valueCodeableConcept.coding[0].code == "LA6727-7"
    assert (
        inst.component[4].valueCodeableConcept.coding[0].display
        == "Good, strong cry; normal rate and effort of breathing "
    )
    assert (
        inst.component[4].valueCodeableConcept.coding[0].extension[0].url
        == "http://hl7.org/fhir/StructureDefinition/ordinalValue"
    )
    assert float(
        inst.component[4].valueCodeableConcept.coding[0].extension[0].valueDecimal
    ) == float(2)
    assert inst.component[4].valueCodeableConcept.coding[0].system == "http://loinc.org"
    assert inst.component[4].valueCodeableConcept.coding[1].code == "2"
    assert (
        inst.component[4].valueCodeableConcept.coding[1].system
        == "http://acme.ped/apgarrespiratoryeffort"
    )
    assert (
        inst.component[4].valueCodeableConcept.text
        == "2. Good, strong cry; normal rate and effort of breathing"
    )
    # Subject is a contained (inline) newborn patient resource.
    assert inst.contained[0].id == "newborn"
    assert inst.effectiveDateTime == fhirtypes.DateTime.validate("2016-05-18T22:33:22Z")
    assert inst.id == "5minute-apgar-score"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.performer[0].reference == "Practitioner/example"
    assert inst.status == "final"
    assert inst.subject.reference == "#newborn"
    assert inst.text.status == "generated"
    assert inst.valueQuantity.code == "{score}"
    assert inst.valueQuantity.system == "http://unitsofmeasure.org"
    assert float(inst.valueQuantity.value) == float(10)
def test_observation_8(base_settings):
    """No. 8 tests collection for Observation.
    Test File: observation-example-5minute-apgar-score.json
    """
    json_path = (
        base_settings["unittest_data_dir"]
        / "observation-example-5minute-apgar-score.json"
    )
    parsed = observation.Observation.parse_file(
        json_path, content_type="application/json", encoding="utf-8"
    )
    assert parsed.resource_type == "Observation"
    impl_observation_8(parsed)
    # Round-trip: serialize the parsed resource and rebuild it from the dict,
    # then re-run the same field assertions on the rebuilt instance.
    serialized = parsed.dict()
    assert serialized["resourceType"] == "Observation"
    rebuilt = observation.Observation(**serialized)
    impl_observation_8(rebuilt)
def impl_observation_9(inst):
    """Assert every expected field of fixture
    observation-example-sample-data.json (12-lead EKG, id "ekg").

    Three components carry leads I/II/III as SampledData with identical
    scaling (factor 1.612, limits +/-3300, origin 2048, period 10).
    """
    assert inst.category[0].coding[0].code == "procedure"
    assert inst.category[0].coding[0].display == "Procedure"
    assert (
        inst.category[0].coding[0].system
        == "http://terminology.hl7.org/CodeSystem/observation-category"
    )
    assert inst.code.coding[0].code == "131328"
    assert inst.code.coding[0].display == "MDC_ECG_ELEC_POTL"
    assert inst.code.coding[0].system == "urn:oid:2.16.840.1.113883.6.24"
    # component[0]: lead I
    assert inst.component[0].code.coding[0].code == "131329"
    assert inst.component[0].code.coding[0].display == "MDC_ECG_ELEC_POTL_I"
    assert inst.component[0].code.coding[0].system == "urn:oid:2.16.840.1.113883.6.24"
    assert inst.component[0].valueSampledData.dimensions == 1
    assert float(inst.component[0].valueSampledData.factor) == float(1.612)
    assert float(inst.component[0].valueSampledData.lowerLimit) == float(-3300)
    assert float(inst.component[0].valueSampledData.origin.value) == float(2048)
    assert float(inst.component[0].valueSampledData.period) == float(10)
    assert float(inst.component[0].valueSampledData.upperLimit) == float(3300)
    # component[1]: lead II
    assert inst.component[1].code.coding[0].code == "131330"
    assert inst.component[1].code.coding[0].display == "MDC_ECG_ELEC_POTL_II"
    assert inst.component[1].code.coding[0].system == "urn:oid:2.16.840.1.113883.6.24"
    assert inst.component[1].valueSampledData.dimensions == 1
    assert float(inst.component[1].valueSampledData.factor) == float(1.612)
    assert float(inst.component[1].valueSampledData.lowerLimit) == float(-3300)
    assert float(inst.component[1].valueSampledData.origin.value) == float(2048)
    assert float(inst.component[1].valueSampledData.period) == float(10)
    assert float(inst.component[1].valueSampledData.upperLimit) == float(3300)
    # component[2]: lead III
    assert inst.component[2].code.coding[0].code == "131389"
    assert inst.component[2].code.coding[0].display == "MDC_ECG_ELEC_POTL_III"
    assert inst.component[2].code.coding[0].system == "urn:oid:2.16.840.1.113883.6.24"
    assert inst.component[2].valueSampledData.dimensions == 1
    assert float(inst.component[2].valueSampledData.factor) == float(1.612)
    assert float(inst.component[2].valueSampledData.lowerLimit) == float(-3300)
    assert float(inst.component[2].valueSampledData.origin.value) == float(2048)
    assert float(inst.component[2].valueSampledData.period) == float(10)
    assert float(inst.component[2].valueSampledData.upperLimit) == float(3300)
    assert inst.device.display == "12 lead EKG Device Metric"
    assert inst.effectiveDateTime == fhirtypes.DateTime.validate(
        "2015-02-19T09:30:35+01:00"
    )
    assert inst.id == "ekg"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.performer[0].display == "A. Langeveld"
    assert inst.performer[0].reference == "Practitioner/f005"
    assert inst.status == "final"
    assert inst.subject.display == "P. van de Heuvel"
    assert inst.subject.reference == "Patient/f001"
    assert inst.text.status == "generated"
def test_observation_9(base_settings):
    """No. 9 tests collection for Observation.
    Test File: observation-example-sample-data.json
    """
    json_path = (
        base_settings["unittest_data_dir"] / "observation-example-sample-data.json"
    )
    parsed = observation.Observation.parse_file(
        json_path, content_type="application/json", encoding="utf-8"
    )
    assert parsed.resource_type == "Observation"
    impl_observation_9(parsed)
    # Round-trip: serialize the parsed resource and rebuild it from the dict,
    # then re-run the same field assertions on the rebuilt instance.
    serialized = parsed.dict()
    assert serialized["resourceType"] == "Observation"
    rebuilt = observation.Observation(**serialized)
    impl_observation_9(rebuilt)
def impl_observation_10(inst):
    """Assert every expected field of fixture
    observation-example-f001-glucose.json (blood glucose, id "f001")."""
    assert inst.code.coding[0].code == "15074-8"
    assert inst.code.coding[0].display == "Glucose [Moles/volume] in Blood"
    assert inst.code.coding[0].system == "http://loinc.org"
    assert inst.effectivePeriod.start == fhirtypes.DateTime.validate(
        "2013-04-02T09:30:10+01:00"
    )
    assert inst.id == "f001"
    assert (
        inst.identifier[0].system
        == "http://www.bmc.nl/zorgportal/identifiers/observations"
    )
    assert inst.identifier[0].use == "official"
    assert inst.identifier[0].value == "6323"
    # Interpretation "H" (High): the 6.3 mmol/L result exceeds the 6.2 upper
    # reference-range bound asserted below.
    assert inst.interpretation[0].coding[0].code == "H"
    assert inst.interpretation[0].coding[0].display == "High"
    assert inst.interpretation[0].coding[0].system == (
        "http://terminology.hl7.org/CodeSystem/v3-ObservationInterpre" "tation"
    )
    assert inst.issued == fhirtypes.Instant.validate("2013-04-03T15:30:10+01:00")
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.performer[0].display == "A. Langeveld"
    assert inst.performer[0].reference == "Practitioner/f005"
    assert inst.referenceRange[0].high.code == "mmol/L"
    assert inst.referenceRange[0].high.system == "http://unitsofmeasure.org"
    assert inst.referenceRange[0].high.unit == "mmol/l"
    assert float(inst.referenceRange[0].high.value) == float(6.2)
    assert inst.referenceRange[0].low.code == "mmol/L"
    assert inst.referenceRange[0].low.system == "http://unitsofmeasure.org"
    assert inst.referenceRange[0].low.unit == "mmol/l"
    assert float(inst.referenceRange[0].low.value) == float(3.1)
    assert inst.status == "final"
    assert inst.subject.display == "P. van de Heuvel"
    assert inst.subject.reference == "Patient/f001"
    assert inst.text.status == "generated"
    assert inst.valueQuantity.code == "mmol/L"
    assert inst.valueQuantity.system == "http://unitsofmeasure.org"
    assert inst.valueQuantity.unit == "mmol/l"
    assert float(inst.valueQuantity.value) == float(6.3)
def test_observation_10(base_settings):
    """No. 10 tests collection for Observation.
    Test File: observation-example-f001-glucose.json
    """
    json_path = (
        base_settings["unittest_data_dir"] / "observation-example-f001-glucose.json"
    )
    parsed = observation.Observation.parse_file(
        json_path, content_type="application/json", encoding="utf-8"
    )
    assert parsed.resource_type == "Observation"
    impl_observation_10(parsed)
    # Round-trip: serialize the parsed resource and rebuild it from the dict,
    # then re-run the same field assertions on the rebuilt instance.
    serialized = parsed.dict()
    assert serialized["resourceType"] == "Observation"
    rebuilt = observation.Observation(**serialized)
    impl_observation_10(rebuilt)
| 44.092971
| 88
| 0.682309
| 4,889
| 38,890
| 5.392514
| 0.077726
| 0.146412
| 0.109543
| 0.036413
| 0.92706
| 0.911508
| 0.874336
| 0.832347
| 0.755879
| 0.714611
| 0
| 0.047412
| 0.167498
| 38,890
| 881
| 89
| 44.143019
| 0.766895
| 0.044202
| 0
| 0.592398
| 0
| 0
| 0.227068
| 0.021534
| 0
| 0
| 0
| 0
| 0.579292
| 1
| 0.026212
| false
| 0
| 0.003932
| 0
| 0.030144
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a070ef2d369283b3d2aca97372b99742d7de894d
| 744
|
py
|
Python
|
d05.py
|
f-koehler/advent_of_code_2017
|
63c01f6aa0786b6808fe0e2ba0125590cb28ba12
|
[
"MIT"
] | null | null | null |
d05.py
|
f-koehler/advent_of_code_2017
|
63c01f6aa0786b6808fe0e2ba0125590cb28ba12
|
[
"MIT"
] | null | null | null |
d05.py
|
f-koehler/advent_of_code_2017
|
63c01f6aa0786b6808fe0e2ba0125590cb28ba12
|
[
"MIT"
] | null | null | null |
def count_jump_steps(offsets, strange=False):
    """Return how many jumps it takes to escape the offset list.

    Starting at index 0, repeatedly read the offset under the cursor,
    adjust that offset in place, then jump by the value that was read.
    The walk ends when the cursor leaves the list on either side.
    (Advent of Code 2017, day 5.)

    Args:
        offsets: initial jump offsets; the caller's list is not modified
            (the walk mutates a private copy).
        strange: part-two rule — an offset of three or more is decremented
            after use instead of incremented.

    Returns:
        The number of jumps performed before escaping.
    """
    jumps = list(offsets)  # work on a copy; the original loop clobbered its input
    position = 0
    steps = 0
    # Bug fix: also stop when the cursor goes negative. The original only
    # checked the upper bound, so a negative position silently wrapped
    # around via Python's negative indexing instead of terminating.
    while 0 <= position < len(jumps):
        offset = jumps[position]
        if strange and offset >= 3:
            jumps[position] -= 1
        else:
            jumps[position] += 1
        position += offset
        steps += 1
    return steps


if __name__ == "__main__":
    with open("input_05.txt") as fhandle:
        instructions = [int(line.strip()) for line in fhandle]
    print(count_jump_steps(instructions))                  # part 1
    print(count_jump_steps(instructions, strange=True))    # part 2
| 24.8
| 62
| 0.575269
| 80
| 744
| 5.225
| 0.325
| 0.239234
| 0.150718
| 0.07177
| 0.894737
| 0.894737
| 0.894737
| 0.894737
| 0.894737
| 0.650718
| 0
| 0.027613
| 0.318548
| 744
| 29
| 63
| 25.655172
| 0.796844
| 0
| 0
| 0.833333
| 0
| 0
| 0.043011
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a08b901014ee89c294b063ed98a28d4efc7dd346
| 2,073
|
py
|
Python
|
scripts/MergeByRefGene/merge_by_ref_gene_test.py
|
cfe-lab/mixed-hcv
|
ce5991e248017471bb587a07371f904c8ec42537
|
[
"Apache-2.0"
] | null | null | null |
scripts/MergeByRefGene/merge_by_ref_gene_test.py
|
cfe-lab/mixed-hcv
|
ce5991e248017471bb587a07371f904c8ec42537
|
[
"Apache-2.0"
] | null | null | null |
scripts/MergeByRefGene/merge_by_ref_gene_test.py
|
cfe-lab/mixed-hcv
|
ce5991e248017471bb587a07371f904c8ec42537
|
[
"Apache-2.0"
] | null | null | null |
from unittest.case import TestCase
from StringIO import StringIO
from merge_by_ref_gene import merge_by_ref_gene
class MergeByRefGeneTest(TestCase):
    """Tests for merge_by_ref_gene(): merging per-read amino-acid count CSVs
    into per-reference-position counts.

    Input columns: rname, gene, aa.pos, then one count column per amino
    acid plus stop (*).  Output drops rname and reports 1-based ref.pos.
    NOTE(review): the 0-based aa.pos -> 1-based ref.pos mapping is inferred
    from the fixtures below — confirm against merge_by_ref_gene itself.
    """
    def setUp(self):
        # Compare long CSV strings with a line-by-line diff on failure.
        self.addTypeEqualityFunc(str, self.assertMultiLineEqual)
    def test_simple(self):
        # Two reads (r1, r2) cover the same gene positions; their counts
        # should be summed per position in the merged output.
        aminos = StringIO("""\
rname,gene,aa.pos,A,C,D,E,F,G,H,I,K,L,M,N,P,Q,R,S,T,V,W,Y,*
r1,NS3,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
r1,NS3,1,0,1,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0
r1,NS3,2,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0
r1,NS3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0
r2,NS3,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
r2,NS3,1,0,0,2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0
r2,NS3,2,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0
r2,NS3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0
""")
        expected_merged = """\
gene,ref.pos,A,C,D,E,F,G,H,I,K,L,M,N,P,Q,R,S,T,V,W,Y,*
NS3,1,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
NS3,2,0,1,2,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0
NS3,3,0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0
NS3,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0
"""
        merged = StringIO()
        merge_by_ref_gene(aminos, merged)
        self.maxDiff = None
        self.assertEqual(expected_merged, merged.getvalue())
    def test_consensus(self):
        """ Make sure 10 A's are chosen over 2 S's to make APIT, not SPIT. """
        aminos = StringIO("""\
rname,gene,aa.pos,A,C,D,E,F,G,H,I,K,L,M,N,P,Q,R,S,T,V,W,Y,*
r1,NS3,0,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0
r1,NS3,1,0,0,0,0,0,0,0,0,0,0,0,0,12,0,0,0,0,0,0,0,0
r1,NS3,2,0,0,0,0,0,0,0,12,0,0,0,0,0,0,0,0,0,0,0,0,0
r1,NS3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12,0,0,0,0
""")
        expected_merged = """\
gene,ref.pos,A,C,D,E,F,G,H,I,K,L,M,N,P,Q,R,S,T,V,W,Y,*
NS3,1,10,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0
NS3,2,0,0,0,0,0,0,0,0,0,0,0,0,12,0,0,0,0,0,0,0,0
NS3,3,0,0,0,0,0,0,0,12,0,0,0,0,0,0,0,0,0,0,0,0,0
NS3,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12,0,0,0,0
"""
        merged = StringIO()
        merge_by_ref_gene(aminos, merged)
        self.maxDiff = None
        self.assertEqual(expected_merged, merged.getvalue())
| 35.741379
| 78
| 0.578389
| 662
| 2,073
| 1.783988
| 0.108761
| 0.599492
| 0.80271
| 0.944962
| 0.770533
| 0.770533
| 0.7663
| 0.7663
| 0.764606
| 0.755292
| 0
| 0.262073
| 0.11095
| 2,073
| 57
| 79
| 36.368421
| 0.37873
| 0.029908
| 0
| 0.416667
| 0
| 0.5
| 0.62007
| 0.604094
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.0625
| false
| 0
| 0.0625
| 0
| 0.145833
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
265954b1251c8a106cf359b0a81cea8e8c1566be
| 6,833
|
py
|
Python
|
utils/dataset.py
|
Chen-chen680/StockAnalysis
|
5b6e1366ae012c5ac9090bf7a2c1c6e22425154c
|
[
"MIT"
] | 1
|
2022-03-19T04:19:59.000Z
|
2022-03-19T04:19:59.000Z
|
utils/dataset.py
|
Chen-chen680/StockAnalysis
|
5b6e1366ae012c5ac9090bf7a2c1c6e22425154c
|
[
"MIT"
] | null | null | null |
utils/dataset.py
|
Chen-chen680/StockAnalysis
|
5b6e1366ae012c5ac9090bf7a2c1c6e22425154c
|
[
"MIT"
] | null | null | null |
from torch.utils.data import DataLoader, Dataset, random_split
import numpy as np
class TrainDataset(Dataset):
    """First 70% of a CSV time series, min-max normalised per column.

    CSV layout (inferred from the parsing below): column 0 is a row index,
    column 1 a timestamp, columns 2+ numeric features; the first line is a
    header.  A sample is ``sequence`` consecutive rows; the label is
    feature column 1 of the row that follows the window.
    """

    def __init__(self, path, sequence):
        self.sequence = sequence
        with open(path, 'r', encoding='utf-8-sig') as f:
            lines = f.readlines()
        feature_rows, stamps = [], []
        for raw in lines[1:]:  # lines[1:] skips the header row
            fields = raw.replace('\n', '').split(',')
            feature_rows.append(fields[2:])
            stamps.append(fields[1])
        values = np.array(feature_rows, dtype=np.float32)
        # Min-max normalise each column independently, remembering the
        # extremes so predictions can be de-normalised later.
        maxima, minima = [], []
        for col in range(values.shape[1]):
            col_max = np.max(values[:, col])
            col_min = np.min(values[:, col])
            maxima.append(col_max)
            minima.append(col_min)
            values[:, col] = (values[:, col] - col_min) / (col_max - col_min)
        self.max_list, self.min_list = maxima, minima
        cutoff = int(0.7 * len(values))  # chronological 70% train split
        self.content_list = list(values)[:cutoff]
        self.time_list = stamps[:cutoff]

    def __getitem__(self, item):
        window = self.content_list[item: item + self.sequence]
        target = self.content_list[item + self.sequence][1]
        return (
            np.array(window, dtype=np.float32),
            np.array(target, dtype=np.float32),
        )

    def __len__(self):
        return len(self.content_list) - self.sequence
class TestDataset(Dataset):
    """Tail 30% (minus the final row) of a CSV time series for evaluation.

    Same CSV layout and per-column min-max normalisation as TrainDataset.
    NOTE(review): the extremes are computed over the *whole* file, i.e. the
    test split shares normalisation statistics with the training data.
    ``time_list`` is additionally trimmed by ``sequence`` entries so each
    timestamp lines up with a sample's label row.
    """

    def __init__(self, path, sequence):
        self.sequence = sequence
        with open(path, 'r', encoding='utf-8-sig') as f:
            lines = f.readlines()
        feature_rows, stamps = [], []
        for raw in lines[1:]:  # lines[1:] skips the header row
            fields = raw.replace('\n', '').split(',')
            feature_rows.append(fields[2:])
            stamps.append(fields[1])
        values = np.array(feature_rows, dtype=np.float32)
        maxima, minima = [], []
        for col in range(values.shape[1]):
            col_max = np.max(values[:, col])
            col_min = np.min(values[:, col])
            maxima.append(col_max)
            minima.append(col_min)
            values[:, col] = (values[:, col] - col_min) / (col_max - col_min)
        cutoff = int(0.7 * len(values))  # rows past the 70% train split
        self.content_list = list(values)[cutoff: len(values) - 1]
        stamps = stamps[cutoff: len(stamps) - 1]
        self.time_list = stamps[self.sequence:]
        self.max_list, self.min_list = maxima, minima

    def __getitem__(self, item):
        window = self.content_list[item: item + self.sequence]
        target = self.content_list[item + self.sequence][1]
        return (
            np.array(window, dtype=np.float32),
            np.array(target, dtype=np.float32),
        )

    def __len__(self):
        return len(self.content_list) - self.sequence
class TrainDatasetOnlyShoupan(Dataset):
    """Training split using only the second feature column (the close price,
    "shoupan"), min-max normalised to [0, 1].

    Samples are ``sequence`` consecutive scalars; the label is the scalar
    immediately after the window.
    """

    def __init__(self, path, sequence):
        self.sequence = sequence
        with open(path, 'r', encoding='utf-8-sig') as f:
            lines = f.readlines()
        feature_rows, stamps = [], []
        for raw in lines[1:]:  # lines[1:] skips the header row
            fields = raw.replace('\n', '').split(',')
            feature_rows.append(fields[2:])
            stamps.append(fields[1])
        # Keep only feature column 1 and normalise against its extremes,
        # which are retained for later de-normalisation.
        closes = np.array(feature_rows, dtype=np.float32)[:, 1]
        self.max_value = np.max(closes)
        self.min_value = np.min(closes)
        closes = (closes - self.min_value) / (self.max_value - self.min_value)
        cutoff = int(0.7 * len(closes))  # chronological 70% train split
        self.content_list = list(closes)[:cutoff]
        self.time_list = stamps[:cutoff]

    def __getitem__(self, item):
        window = self.content_list[item: item + self.sequence]
        target = self.content_list[item + self.sequence]
        return (
            np.array(window, dtype=np.float32),
            np.array(target, dtype=np.float32),
        )

    def __len__(self):
        return len(self.content_list) - self.sequence
class TestDatasetOnlyShoupan(Dataset):
    """Evaluation split (tail 30%, minus the final row) using only the
    second feature column (the close price, "shoupan").

    NOTE(review): normalisation extremes come from the *whole* file, so the
    test split shares statistics with the training data.  Unlike
    TestDataset, ``time_list`` is not trimmed by ``sequence`` here.
    """

    def __init__(self, path, sequence):
        self.sequence = sequence
        with open(path, 'r', encoding='utf-8-sig') as f:
            lines = f.readlines()
        feature_rows, stamps = [], []
        for raw in lines[1:]:  # lines[1:] skips the header row
            fields = raw.replace('\n', '').split(',')
            feature_rows.append(fields[2:])
            stamps.append(fields[1])
        closes = np.array(feature_rows, dtype=np.float32)[:, 1]
        self.max_value = np.max(closes)
        self.min_value = np.min(closes)
        closes = (closes - self.min_value) / (self.max_value - self.min_value)
        cutoff = int(0.7 * len(closes))  # rows past the 70% train split
        self.content_list = list(closes)[cutoff: len(closes) - 1]
        self.time_list = stamps[cutoff: len(stamps) - 1]

    def __getitem__(self, item):
        window = self.content_list[item: item + self.sequence]
        target = self.content_list[item + self.sequence]
        return (
            np.array(window, dtype=np.float32),
            np.array(target, dtype=np.float32),
        )

    def __len__(self):
        return len(self.content_list) - self.sequence
if __name__ == '__main__':
    # Smoke test: load the evaluation split of the SSE Composite Index CSV
    # ("上证指数" = Shanghai Stock Exchange index) and report its sizes.
    # NOTE(review): hard-coded absolute Windows path — this only runs on the
    # author's machine; parameterise before reuse.
    # train_dataset = TrainDataset(r'C:\Users\12517\Desktop\股票分析\上证指数.csv', 30)
    test_dataset = TestDatasetOnlyShoupan(r'C:\Users\12517\Desktop\股票分析\上证指数.csv', 30)
    # train_length = int(0.8 * len(dataset))
    # test_length = len(dataset) - train_length
    # train_dataset, test_dataset = random_split(dataset=dataset, lengths=[train_length, test_length])
    # shuffle=False keeps the chronological order of the time series.
    train_loader = DataLoader(dataset=test_dataset, batch_size=32, shuffle=False)
    print(len(test_dataset.time_list), len(test_dataset.content_list))
    # import torch
    # for data, label in train_loader:
    #     data = torch.unsqueeze(data, dim=2)
    #     print(data.shape)
| 40.672619
| 127
| 0.609981
| 923
| 6,833
| 4.252438
| 0.095341
| 0.13172
| 0.061147
| 0.036688
| 0.86293
| 0.861911
| 0.857834
| 0.857834
| 0.857834
| 0.841529
| 0
| 0.017393
| 0.251134
| 6,833
| 168
| 128
| 40.672619
| 0.749658
| 0.054881
| 0
| 0.917293
| 0
| 0
| 0.016755
| 0.005585
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090226
| false
| 0
| 0.015038
| 0.030075
| 0.195489
| 0.007519
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cd4b1f14e8d0318f11c2ed8649f2c37bc597f848
| 2,402
|
py
|
Python
|
utils.py
|
Anand1310/Bonobos-Tic-Tac-Toe
|
26e1af5c46455af0f37e1bc803799f825c373b5a
|
[
"MIT"
] | null | null | null |
utils.py
|
Anand1310/Bonobos-Tic-Tac-Toe
|
26e1af5c46455af0f37e1bc803799f825c373b5a
|
[
"MIT"
] | null | null | null |
utils.py
|
Anand1310/Bonobos-Tic-Tac-Toe
|
26e1af5c46455af0f37e1bc803799f825c373b5a
|
[
"MIT"
] | null | null | null |
from typing import Tuple
class Vec:
    """A 2-D integer vector.

    Arithmetic accepts another ``Vec``, a single ``int`` (applied to both
    components), or a tuple whose first two items are ints.  Unsupported
    operand types raise ``ValueError``.

    NOTE(review): ``__repr__`` and the error messages say "Position" — the
    class was presumably renamed from Position; the strings are kept
    unchanged for compatibility with anything matching on them.
    """

    def __init__(self, x, y):
        self.x: int = x
        self.y: int = y

    def __iter__(self):
        # Enables tuple(vec) and `x, y = vec` unpacking.
        return iter((self.x, self.y))

    def _coerce(self, other, verb):
        """Return ``other`` as an ``(x, y)`` pair, or raise ``ValueError``.

        Shared by all arithmetic dunders; ``verb`` names the operation in
        the error message (the originals all said "add" — copy-paste bug).
        """
        if isinstance(other, Vec):
            return other.x, other.y
        if isinstance(other, int):
            return other, other
        # The length check fixes an IndexError on tuples shorter than two;
        # longer tuples keep working (only the first two items are used).
        if (
            isinstance(other, tuple)
            and len(other) >= 2
            and isinstance(other[0], int)
            and isinstance(other[1], int)
        ):
            return other[0], other[1]
        raise ValueError(f"Can not {verb} Position with {type(other)}")

    def __add__(self, other):
        ox, oy = self._coerce(other, "add")
        return Vec(self.x + ox, self.y + oy)

    def __sub__(self, other):
        ox, oy = self._coerce(other, "subtract")
        return Vec(self.x - ox, self.y - oy)

    def __truediv__(self, other):
        # NOTE(review): deliberately floor division (//), as in the
        # original — the class is integer-valued throughout.
        ox, oy = self._coerce(other, "divide")
        return Vec(self.x // ox, self.y // oy)

    def __floordiv__(self, other):
        # Bug fix: the original called self.__truediv__(self, other), which
        # passed self twice and always raised TypeError.
        return self.__truediv__(other)

    def __mul__(self, other):
        ox, oy = self._coerce(other, "multiply")
        return Vec(self.x * ox, self.y * oy)

    def __repr__(self) -> str:
        return f"Position({self.x}, {self.y})"
| 32.459459
| 72
| 0.532889
| 308
| 2,402
| 4.038961
| 0.12013
| 0.241158
| 0.125402
| 0.135048
| 0.845659
| 0.845659
| 0.845659
| 0.845659
| 0.845659
| 0.845659
| 0
| 0.010076
| 0.338884
| 2,402
| 73
| 73
| 32.90411
| 0.7733
| 0
| 0
| 0.571429
| 0
| 0
| 0.076603
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.126984
| false
| 0
| 0.015873
| 0.047619
| 0.396825
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cd5265f81f14a36e69f6facff60518f13f13df3e
| 21,184
|
py
|
Python
|
test/engine/test_engine.py
|
lpubsppop01/my_todo_app
|
dbcade237c5b3f78bc196952ffe192436e2f9a45
|
[
"Zlib"
] | null | null | null |
test/engine/test_engine.py
|
lpubsppop01/my_todo_app
|
dbcade237c5b3f78bc196952ffe192436e2f9a45
|
[
"Zlib"
] | 2
|
2022-02-25T14:01:58.000Z
|
2022-02-25T14:01:59.000Z
|
test/engine/test_engine.py
|
lpubsppop01/my_todo_app
|
dbcade237c5b3f78bc196952ffe192436e2f9a45
|
[
"Zlib"
] | null | null | null |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from datetime import datetime
from unittest import TestCase
from freezegun import freeze_time
from my_todo_app.engine.engine import TaskEngine, InsertTo
from my_todo_app.engine.task_sqlite3 import SQLite3TaskDatabase
class TestTaskEngine(TestCase):
    """End-to-end tests for TaskEngine backed by a real SQLite3 database.

    Each test creates its own database file named after the test, exercises
    the engine through its public API, and removes the file afterwards.
    """

    def _create_engine(self, func_name):
        """Create a fresh SQLite-backed engine for *func_name*.

        Returns (db, engine, db_path).  Any leftover database file from a
        previous aborted run is removed first.  The file name matches the
        original per-test convention: '<ClassName>_<func_name>.sqlite3'.
        """
        db_path = os.path.join(os.path.dirname(__file__),
                               '{}_{}.sqlite3'.format(self.__class__.__name__, func_name))
        if os.path.exists(db_path):
            os.remove(db_path)
        db = SQLite3TaskDatabase(db_path)
        return db, TaskEngine(db), db_path

    def _destroy_engine(self, db, db_path):
        """Close the database connection and delete the test database file."""
        db._conn.close()
        os.remove(db_path)

    def test_tasklist_crud(self):
        db, engine, db_path = self._create_engine(sys._getframe().f_code.co_name)
        # A brand-new engine shows nothing and selects nothing.
        self.assertEqual(0, len(engine.shown_tasklists))
        self.assertEqual(0, len(engine.shown_tasks))
        self.assertEqual(None, engine.selected_tasklist)
        self.assertEqual(None, engine.selected_task)
        engine.add_tasklist('Inbox')
        engine.add_tasklist('Foo')
        engine.add_tasklist('Bar')
        engine.add_tasklist('Baz')
        # Lists appear in insertion order; the last added one is selected.
        self.assertEqual(4, len(engine.shown_tasklists))
        self.assertEqual('Inbox', engine.shown_tasklists[0].name)
        self.assertEqual('Foo', engine.shown_tasklists[1].name)
        self.assertEqual('Bar', engine.shown_tasklists[2].name)
        self.assertEqual('Baz', engine.shown_tasklists[3].name)
        self.assertEqual(engine.shown_tasklists[3], engine.selected_tasklist)
        self.assertEqual(None, engine.selected_task)
        self.assertTrue(engine.can_up_selected_tasklist())
        engine.select_tasklist(engine.shown_tasklists[1].id)
        engine.edit_selected_tasklist(name='Next Action')
        self.assertEqual(engine.shown_tasklists[1], engine.selected_tasklist)
        self.assertEqual('Next Action', engine.selected_tasklist.name)
        # Removing the selected list moves the selection to its successor.
        engine.select_tasklist(engine.shown_tasklists[2].id)
        engine.remove_selected_tasklist()
        self.assertEqual(3, len(engine.shown_tasklists))
        self.assertEqual(engine.shown_tasklists[2], engine.selected_tasklist)
        self.assertEqual('Baz', engine.selected_tasklist.name)
        engine.remove_selected_tasklist()
        self.assertEqual(2, len(engine.shown_tasklists))
        self.assertEqual(engine.shown_tasklists[1], engine.selected_tasklist)
        self.assertEqual('Next Action', engine.selected_tasklist.name)
        self._destroy_engine(db, db_path)

    def test_tasklist_up_down(self):
        db, engine, db_path = self._create_engine(sys._getframe().f_code.co_name)
        engine.add_tasklist('Inbox')
        engine.add_tasklist('Foo')
        engine.add_tasklist('Bar')
        engine.add_tasklist('Baz')
        # 'Baz' (last added) is selected; walk it up to the top.
        self.assertTrue(engine.can_up_selected_tasklist())
        engine.up_selected_tasklist()
        self.assertEqual('Inbox', engine.shown_tasklists[0].name)
        self.assertEqual('Foo', engine.shown_tasklists[1].name)
        self.assertEqual('Baz', engine.shown_tasklists[2].name)
        self.assertEqual('Bar', engine.shown_tasklists[3].name)
        self.assertEqual(engine.shown_tasklists[2], engine.selected_tasklist)
        self.assertTrue(engine.can_up_selected_tasklist())
        engine.up_selected_tasklist()
        self.assertEqual('Inbox', engine.shown_tasklists[0].name)
        self.assertEqual('Baz', engine.shown_tasklists[1].name)
        self.assertEqual('Foo', engine.shown_tasklists[2].name)
        self.assertEqual('Bar', engine.shown_tasklists[3].name)
        self.assertEqual(engine.shown_tasklists[1], engine.selected_tasklist)
        self.assertTrue(engine.can_up_selected_tasklist())
        engine.up_selected_tasklist()
        self.assertEqual('Baz', engine.shown_tasklists[0].name)
        self.assertEqual('Inbox', engine.shown_tasklists[1].name)
        self.assertEqual('Foo', engine.shown_tasklists[2].name)
        self.assertEqual('Bar', engine.shown_tasklists[3].name)
        self.assertEqual(engine.shown_tasklists[0], engine.selected_tasklist)
        self.assertFalse(engine.can_up_selected_tasklist())
        # And back down to the bottom.
        self.assertTrue(engine.can_down_selected_tasklist())
        engine.down_selected_tasklist()
        self.assertEqual('Inbox', engine.shown_tasklists[0].name)
        self.assertEqual('Baz', engine.shown_tasklists[1].name)
        self.assertEqual('Foo', engine.shown_tasklists[2].name)
        self.assertEqual('Bar', engine.shown_tasklists[3].name)
        self.assertEqual(engine.shown_tasklists[1], engine.selected_tasklist)
        self.assertTrue(engine.can_down_selected_tasklist())
        engine.down_selected_tasklist()
        self.assertEqual('Inbox', engine.shown_tasklists[0].name)
        self.assertEqual('Foo', engine.shown_tasklists[1].name)
        self.assertEqual('Baz', engine.shown_tasklists[2].name)
        self.assertEqual('Bar', engine.shown_tasklists[3].name)
        self.assertEqual(engine.shown_tasklists[2], engine.selected_tasklist)
        self.assertTrue(engine.can_down_selected_tasklist())
        engine.down_selected_tasklist()
        self.assertEqual('Inbox', engine.shown_tasklists[0].name)
        self.assertEqual('Foo', engine.shown_tasklists[1].name)
        self.assertEqual('Bar', engine.shown_tasklists[2].name)
        self.assertEqual('Baz', engine.shown_tasklists[3].name)
        self.assertEqual(engine.shown_tasklists[3], engine.selected_tasklist)
        self.assertFalse(engine.can_down_selected_tasklist())
        self._destroy_engine(db, db_path)

    def test_task_crud(self):
        db, engine, db_path = self._create_engine(sys._getframe().f_code.co_name)
        engine.add_tasklist('Inbox')
        engine.add_tasklist('Next Action')
        engine.select_tasklist(engine.shown_tasklists[0].id)
        # Creation timestamps come from the (frozen) wall clock.
        datetime_20190927_120000 = datetime(2019, 9, 27, 12, 0, 0)
        with freeze_time(datetime_20190927_120000):
            engine.add_task()
        self.assertEqual(1, len(engine.shown_tasks))
        self.assertEqual('', engine.shown_tasks[0].name)
        self.assertEqual(datetime_20190927_120000.timestamp(), engine.shown_tasks[0].created_at)
        self.assertEqual(datetime_20190927_120000.timestamp(), engine.shown_tasks[0].updated_at)
        self.assertEqual(engine.shown_tasks[0], engine.selected_task)
        self.assertEqual(engine.selected_tasklist.id, engine.selected_task.list_id)
        # Editing touches updated_at but not created_at.
        datetime_20190927_120500 = datetime(2019, 9, 27, 12, 5, 0)
        with freeze_time(datetime_20190927_120500):
            engine.edit_selected_task(name='Add TaskEngine unit tests', memo='Memo')
        self.assertEqual('Add TaskEngine unit tests', engine.selected_task.name)
        self.assertEqual('Memo', engine.selected_task.memo)
        self.assertEqual(datetime_20190927_120500.timestamp(), engine.selected_task.updated_at)
        # New tasks are inserted at the top of the shown list.
        datetime_20190927_121000 = datetime(2019, 9, 27, 12, 10, 0)
        with freeze_time(datetime_20190927_121000):
            engine.add_task()
        self.assertEqual(2, len(engine.shown_tasks))
        self.assertEqual('', engine.shown_tasks[0].name)
        self.assertEqual(datetime_20190927_121000.timestamp(), engine.shown_tasks[0].updated_at)
        self.assertEqual('Add TaskEngine unit tests', engine.shown_tasks[1].name)
        self.assertEqual(engine.shown_tasks[0], engine.selected_task)
        # Moving a task to another list removes it from the current view
        # and keeps it selected in the destination list.
        engine.select_task(engine.shown_tasks[1].id)
        datetime_20190927_121500 = datetime(2019, 9, 27, 12, 15, 0)
        with freeze_time(datetime_20190927_121500):
            engine.move_selected_task(list_id=engine.shown_tasklists[1].id)
        self.assertEqual(1, len(engine.shown_tasks))
        engine.select_tasklist(engine.shown_tasklists[1].id)
        self.assertEqual(1, len(engine.shown_tasks))
        self.assertEqual('Add TaskEngine unit tests', engine.shown_tasks[0].name)
        self.assertEqual(datetime_20190927_121500.timestamp(), engine.shown_tasks[0].updated_at)
        self.assertEqual(engine.shown_tasks[0], engine.selected_task)
        self.assertEqual(engine.selected_tasklist.id, engine.selected_task.list_id)
        datetime_20190927_121500 = datetime(2019, 9, 27, 12, 15, 0)
        datetime_20190927_122000 = datetime(2019, 9, 27, 12, 20, 0)
        datetime_20190927_122500 = datetime(2019, 9, 27, 12, 25, 0)
        with freeze_time(datetime_20190927_121500):
            engine.add_task(name='Foo')
        with freeze_time(datetime_20190927_122000):
            engine.add_task(name='Bar')
        with freeze_time(datetime_20190927_122500):
            engine.add_task(name='Baz')
        # Newest first: Baz, Bar, Foo, then the moved task.
        self.assertEqual(4, len(engine.shown_tasks))
        self.assertEqual('Foo', engine.shown_tasks[2].name)
        self.assertEqual(datetime_20190927_121500.timestamp(), engine.shown_tasks[2].updated_at)
        self.assertEqual('Bar', engine.shown_tasks[1].name)
        self.assertEqual(datetime_20190927_122000.timestamp(), engine.shown_tasks[1].updated_at)
        self.assertEqual('Baz', engine.shown_tasks[0].name)
        self.assertEqual(datetime_20190927_122500.timestamp(), engine.shown_tasks[0].updated_at)
        self.assertEqual(engine.shown_tasks[0], engine.selected_task)
        engine.select_task(engine.shown_tasks[2].id)
        engine.remove_selected_task()
        self.assertEqual(3, len(engine.shown_tasks))
        self.assertEqual('Add TaskEngine unit tests', engine.shown_tasks[2].name)
        self.assertEqual('Bar', engine.shown_tasks[1].name)
        self.assertEqual('Baz', engine.shown_tasks[0].name)
        self.assertEqual(engine.shown_tasks[2], engine.selected_task)
        engine.remove_selected_task()
        self.assertEqual(2, len(engine.shown_tasks))
        self.assertEqual('Bar', engine.shown_tasks[1].name)
        self.assertEqual('Baz', engine.shown_tasks[0].name)
        self.assertEqual(engine.shown_tasks[1], engine.selected_task)
        # Completing sets completed_at and updated_at together.
        engine.select_task(engine.shown_tasks[1].id)
        datetime_20190927_123000 = datetime(2019, 9, 27, 12, 30, 0)
        with freeze_time(datetime_20190927_123000):
            engine.edit_selected_task(completed=True)
        self.assertTrue(engine.shown_tasks[1].completed)
        self.assertEqual(datetime_20190927_123000.timestamp(), engine.shown_tasks[1].completed_at)
        self.assertEqual(datetime_20190927_123000.timestamp(), engine.shown_tasks[1].updated_at)
        # Archived tasks are hidden unless shows_archive is enabled.
        engine.select_task(engine.shown_tasks[1].id)
        engine.archive_selected_task()
        self.assertEqual(1, len(engine.shown_tasks))
        self.assertEqual('Baz', engine.shown_tasks[0].name)
        self.assertFalse(engine.shown_tasks[0].archived)
        engine.shows_archive = True
        self.assertEqual(2, len(engine.shown_tasks))
        self.assertEqual(engine.shown_tasks[0], engine.selected_task)
        self.assertEqual('Bar', engine.shown_tasks[1].name)
        self.assertTrue(engine.shown_tasks[1].archived)
        self._destroy_engine(db, db_path)

    def test_task_up_down(self):
        db, engine, db_path = self._create_engine(sys._getframe().f_code.co_name)
        engine.add_tasklist('Inbox')
        with freeze_time(datetime(2019, 9, 27, 12, 0, 0)):
            engine.add_task(name='Task1')
        with freeze_time(datetime(2019, 9, 27, 12, 5, 0)):
            engine.add_task(name='Task2', to=InsertTo.LAST_SIBLING)
        with freeze_time(datetime(2019, 9, 27, 12, 10, 0)):
            engine.add_task(name='Task3', to=InsertTo.LAST_SIBLING)
        # 'Task3' (last added, last sibling) is selected; move it up twice.
        self.assertTrue(engine.can_up_selected_task())
        engine.up_selected_task()
        self.assertEqual('Task1', engine.shown_tasks[0].name)
        self.assertEqual('Task3', engine.shown_tasks[1].name)
        self.assertEqual('Task2', engine.shown_tasks[2].name)
        self.assertEqual(engine.shown_tasks[1], engine.selected_task)
        self.assertTrue(engine.can_up_selected_task())
        engine.up_selected_task()
        self.assertEqual('Task3', engine.shown_tasks[0].name)
        self.assertEqual('Task1', engine.shown_tasks[1].name)
        self.assertEqual('Task2', engine.shown_tasks[2].name)
        self.assertEqual(engine.shown_tasks[0], engine.selected_task)
        self.assertFalse(engine.can_up_selected_task())
        engine.down_selected_task()
        self.assertEqual('Task1', engine.shown_tasks[0].name)
        self.assertEqual('Task3', engine.shown_tasks[1].name)
        self.assertEqual('Task2', engine.shown_tasks[2].name)
        self.assertEqual(engine.shown_tasks[1], engine.selected_task)
        self.assertTrue(engine.can_down_selected_task())
        engine.down_selected_task()
        self.assertEqual('Task1', engine.shown_tasks[0].name)
        self.assertEqual('Task2', engine.shown_tasks[1].name)
        self.assertEqual('Task3', engine.shown_tasks[2].name)
        self.assertEqual(engine.shown_tasks[2], engine.selected_task)
        self.assertFalse(engine.can_down_selected_task())
        self._destroy_engine(db, db_path)

    def test_sub_task_crud(self):
        db, engine, db_path = self._create_engine(sys._getframe().f_code.co_name)
        engine.add_tasklist('Inbox')
        engine.add_tasklist('Next Action')
        engine.select_tasklist(engine.shown_tasklists[0].id)
        with freeze_time(datetime(2019, 9, 27, 12, 0, 0)):
            engine.add_task(name='Task1')
        with freeze_time(datetime(2019, 9, 27, 12, 5, 0)):
            engine.add_task(name='Task2', to=InsertTo.LAST_SIBLING)
        # Add a child under Task1; it appears directly after its parent.
        engine.select_task(engine.shown_tasks[0].id)
        datetime_20190927_124000 = datetime(2019, 9, 27, 12, 40, 0)
        with freeze_time(datetime_20190927_124000):
            engine.add_task(name='Sub1', to=InsertTo.LAST_CHILD)
        self.assertEqual(3, len(engine.shown_tasks))
        self.assertEqual(engine.shown_tasks[1], engine.selected_task)
        self.assertEqual('Sub1', engine.shown_tasks[1].name)
        self.assertEqual(engine.shown_tasks[0].id, engine.shown_tasks[1].parent_task_id)
        self.assertEqual(datetime_20190927_124000.timestamp(), engine.shown_tasks[1].updated_at)
        # A sub-task itself cannot be moved to another list.
        self.assertFalse(engine.can_move_selected_task())
        # Moving the parent takes its sub-task along.
        engine.select_task(engine.shown_tasks[0].id)
        datetime_20190927_125000 = datetime(2019, 9, 27, 12, 50, 0)
        with freeze_time(datetime_20190927_125000):
            engine.move_selected_task(list_id=engine._shown_tasklists[1].id)
        self.assertEqual(1, len(engine.shown_tasks))
        engine.select_tasklist(engine.shown_tasklists[1].id)
        self.assertEqual(2, len(engine.shown_tasks))
        self.assertEqual('Task1', engine.shown_tasks[0].name)
        self.assertEqual('Sub1', engine.shown_tasks[1].name)
        self.assertEqual(engine.shown_tasks[0], engine.selected_task)
        # Removing the parent removes its sub-task too.
        engine.remove_selected_task()
        self.assertEqual(0, len(engine.shown_tasks))
        self._destroy_engine(db, db_path)

    def test_sub_task_archive(self):
        db, engine, db_path = self._create_engine(sys._getframe().f_code.co_name)
        engine.add_tasklist('Inbox')
        with freeze_time(datetime(2019, 9, 27, 12, 0, 0)):
            engine.add_task(name='Task1')
        with freeze_time(datetime(2019, 9, 27, 12, 5, 0)):
            engine.add_task(name='Task2', to=InsertTo.LAST_SIBLING)
        engine.select_task(engine.shown_tasks[0].id)
        with freeze_time(datetime(2019, 9, 27, 12, 10, 0)):
            engine.add_task(name='Sub1', to=InsertTo.LAST_CHILD)
        with freeze_time(datetime(2019, 9, 27, 12, 15, 0)):
            engine.add_task(name='Sub2', to=InsertTo.LAST_SIBLING)
        with freeze_time(datetime(2019, 9, 27, 12, 20, 0)):
            engine.add_task(name='Sub3', to=InsertTo.LAST_SIBLING)
        # Archiving a sub-task hides only that sub-task.
        engine.select_task(engine.shown_tasks[3].id)
        with freeze_time(datetime(2019, 9, 27, 14, 0, 0)):
            engine.archive_selected_task()
        self.assertEqual(4, len(engine.shown_tasks))
        engine.shows_archive = True
        self.assertEqual(5, len(engine.shown_tasks))
        engine.select_task(engine.shown_tasks[3].id)
        engine.unarchive_selected_task()
        engine.shows_archive = False
        self.assertEqual(5, len(engine.shown_tasks))
        # Archiving the parent archives the whole subtree.
        engine.select_task(engine.shown_tasks[0].id)
        engine.archive_selected_task()
        self.assertEqual(1, len(engine.shown_tasks))
        engine.shows_archive = True
        self.assertEqual(5, len(engine.shown_tasks))
        # A sub-task of an archived parent cannot be unarchived on its own.
        engine.select_task(engine.shown_tasks[3].id)
        self.assertFalse(engine.can_unarchive_selected_task())
        # Bug fix: this test previously leaked the connection and left the
        # .sqlite3 file behind — every sibling test performs this teardown.
        self._destroy_engine(db, db_path)

    def test_sub_task_up_down(self):
        db, engine, db_path = self._create_engine(sys._getframe().f_code.co_name)
        engine.add_tasklist('Inbox')
        with freeze_time(datetime(2019, 9, 27, 12, 0, 0)):
            engine.add_task(name='Task1')
        with freeze_time(datetime(2019, 9, 27, 12, 5, 0)):
            engine.add_task(name='Task2', to=InsertTo.LAST_SIBLING)
        engine.select_task(engine.shown_tasks[0].id)
        with freeze_time(datetime(2019, 9, 27, 12, 10, 0)):
            engine.add_task(name='Sub1', to=InsertTo.LAST_CHILD)
        with freeze_time(datetime(2019, 9, 27, 12, 15, 0)):
            engine.add_task(name='Sub2', to=InsertTo.LAST_SIBLING)
        with freeze_time(datetime(2019, 9, 27, 12, 20, 0)):
            engine.add_task(name='Sub3', to=InsertTo.LAST_SIBLING)
        # Sub-tasks reorder only among their siblings (indices 1..3).
        engine.select_task(engine.shown_tasks[3].id)
        self.assertTrue(engine.can_up_selected_task())
        engine.up_selected_task()
        self.assertEqual('Sub1', engine.shown_tasks[1].name)
        self.assertEqual('Sub3', engine.shown_tasks[2].name)
        self.assertEqual('Sub2', engine.shown_tasks[3].name)
        self.assertEqual(engine.shown_tasks[2], engine.selected_task)
        self.assertTrue(engine.can_up_selected_task())
        engine.up_selected_task()
        self.assertEqual('Sub3', engine.shown_tasks[1].name)
        self.assertEqual('Sub1', engine.shown_tasks[2].name)
        self.assertEqual('Sub2', engine.shown_tasks[3].name)
        self.assertEqual(engine.shown_tasks[1], engine.selected_task)
        self.assertFalse(engine.can_up_selected_task())
        engine.down_selected_task()
        self.assertEqual('Sub1', engine.shown_tasks[1].name)
        self.assertEqual('Sub3', engine.shown_tasks[2].name)
        self.assertEqual('Sub2', engine.shown_tasks[3].name)
        self.assertEqual(engine.shown_tasks[2], engine.selected_task)
        self.assertTrue(engine.can_down_selected_task())
        engine.down_selected_task()
        self.assertEqual('Sub1', engine.shown_tasks[1].name)
        self.assertEqual('Sub2', engine.shown_tasks[2].name)
        self.assertEqual('Sub3', engine.shown_tasks[3].name)
        self.assertEqual(engine.shown_tasks[3], engine.selected_task)
        self.assertFalse(engine.can_down_selected_task())
        # Moving a top-level task down moves its whole subtree below Task2.
        engine.select_task(engine.shown_tasks[0].id)
        engine.down_selected_task()
        self.assertEqual('Task2', engine.shown_tasks[0].name)
        self.assertEqual('Task1', engine.shown_tasks[1].name)
        self.assertEqual(engine.shown_tasks[1], engine.selected_task)
        self.assertTrue(engine.can_up_selected_task())
        self.assertFalse(engine.can_down_selected_task())
        engine.up_selected_task()
        self.assertEqual('Task1', engine.shown_tasks[0].name)
        self.assertEqual('Task2', engine.shown_tasks[4].name)
        self.assertEqual(engine.shown_tasks[0], engine.selected_task)
        self.assertFalse(engine.can_up_selected_task())
        self.assertTrue(engine.can_down_selected_task())
        self._destroy_engine(db, db_path)
| 43.768595
| 104
| 0.684904
| 2,730
| 21,184
| 5.049817
| 0.047253
| 0.129262
| 0.129987
| 0.04316
| 0.925794
| 0.906717
| 0.862252
| 0.825838
| 0.806543
| 0.76759
| 0
| 0.056009
| 0.190049
| 21,184
| 483
| 105
| 43.859213
| 0.747465
| 0.00203
| 0
| 0.763926
| 0
| 0
| 0.03231
| 0
| 0
| 0
| 0
| 0
| 0.464191
| 1
| 0.018568
| false
| 0
| 0.018568
| 0
| 0.039788
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cd81b001a56489fec9c2eebbf869064d6d1f9828
| 1,465
|
py
|
Python
|
Python_Codes/customModule.py
|
arnelimperial/Code-Py
|
c48be58027e99f12a358644b45d502c8fcbd3b98
|
[
"Zlib"
] | null | null | null |
Python_Codes/customModule.py
|
arnelimperial/Code-Py
|
c48be58027e99f12a358644b45d502c8fcbd3b98
|
[
"Zlib"
] | null | null | null |
Python_Codes/customModule.py
|
arnelimperial/Code-Py
|
c48be58027e99f12a358644b45d502c8fcbd3b98
|
[
"Zlib"
] | null | null | null |
#!/usr/bin/env python3
import sys, os
def testme(*args):
    """Check each candidate password string in *args*.

    A string "fits" when it is at least 5 characters long, purely
    alphanumeric, and mixes letters and digits (i.e. it is neither
    all-alphabetic nor all-numeric).  Prints one verdict line per
    argument and exits the interpreter with status 0 on the first
    fitting string; returns None if nothing fits.
    """
    for arg in args:
        # All four rejection branches printed the identical message, so
        # they collapse into a single combined condition.
        if (
            len(arg) < 5
            or not arg.isalnum()
            or arg.isalpha()
            or arg.isdigit()
        ):
            print("The module says no.")
        else:
            print("This string fits as password!")
            sys.exit(0)
# Module-level demo: runs on plain import as well as direct execution.
# First pass: prompt once; on a fitting string, report and exit 0.
userinput = input("Give a string for testing: ")
if len(userinput) < 5:
    print("The module says no.")
elif userinput.isalnum() is False:
    print("The module says no.")
elif userinput.isalpha() is True:
    print("The module says no.")
elif userinput.isdigit() is True:
    print("The module says no.")
else:
    print("This string fits for a password!")
    sys.exit(0)
# Otherwise keep prompting; the loop only ends via sys.exit(0) on success.
while True:
    userinput = input("Give a string for testing: ")
    if len(userinput) < 5:
        print("The module says no.")
    elif userinput.isalnum() is False:
        print("The module says no.")
    elif userinput.isalpha() is True:
        print("The module says no.")
    elif userinput.isdigit() is True:
        print("The module says no.")
    else:
        print("This string fits for a password!")
        sys.exit(0)
    # Re-checks the rejected input via testme (prints a second verdict).
    testme(userinput)
# NOTE(review): unreachable — the `while True` above never breaks, so this
# guard never runs; testme() with no args would be a no-op anyway.
if __name__ == "__main__":
    testme()
| 28.72549
| 56
| 0.542662
| 185
| 1,465
| 4.254054
| 0.237838
| 0.121982
| 0.213469
| 0.27446
| 0.850064
| 0.850064
| 0.850064
| 0.806862
| 0.806862
| 0.711563
| 0
| 0.007269
| 0.342662
| 1,465
| 50
| 57
| 29.3
| 0.809969
| 0.014334
| 0
| 0.714286
| 0
| 0
| 0.265419
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02381
| false
| 0.071429
| 0.02381
| 0
| 0.047619
| 0.357143
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
26a14005febc87bcc400a478160469375fc5b7c8
| 20,233
|
py
|
Python
|
controller/scheduler_controller/scheduler_controller.py
|
rohandhanraj/Auto-AI-Pipeline
|
d5f39715c802db45afae0d5978d228bf0bcd2f0a
|
[
"MIT"
] | null | null | null |
controller/scheduler_controller/scheduler_controller.py
|
rohandhanraj/Auto-AI-Pipeline
|
d5f39715c802db45afae0d5978d228bf0bcd2f0a
|
[
"MIT"
] | null | null | null |
controller/scheduler_controller/scheduler_controller.py
|
rohandhanraj/Auto-AI-Pipeline
|
d5f39715c802db45afae0d5978d228bf0bcd2f0a
|
[
"MIT"
] | null | null | null |
import os
import sys
from datetime import datetime, timedelta
from os import abort
from flask import render_template, redirect, url_for, jsonify, session, request
import threading
import json
import time
from project_library_layer.initializer.initializer import Initializer
from integration_layer.file_management.file_manager import FileManager
from cloud_storage_layer.aws.amazon_simple_storage_service import AmazonSimpleStorageService
from entity_layer.registration.registration import Register
from logging_layer.logger.log_request import LogRequest
from logging_layer.logger.log_exception import LogExceptionDetail
from entity_layer.project.project import Project
from entity_layer.project.project_configuration import ProjectConfiguration
import json
from entity_layer.scheduler.scheduler import Scheduler
import uuid
global process_value
class SchedulerController:
def __init__(self):
self.registration_obj = Register()
self.WRITE = "WRITE"
self.READ = "READ"
def get_scheduler_object(self):
self.scheduler = Scheduler()
return self.scheduler
def scheduler_index(self):
log_writer = LogRequest(executed_by=None, execution_id=str(uuid.uuid4()))
try:
if 'email_address' not in session:
log_writer.log_start(request)
log_writer.log_stop({'navigating': 'login'})
return redirect(url_for('login'))
log_writer.executed_by = session['email_address']
log_writer.log_start(request)
project_data = Project()
result = project_data.list_project()
project_list = None
if result['status']:
project_list = result.get('project_list', None)
result = {'message': None, 'message_status': 'info', 'status': 'True'}
if project_list is not None:
result.update({'project_list': project_list})
sch = Scheduler(socket_io=None)
job_result = sch.get_all_job()
job_detail = None
if job_result['status']:
job_detail = job_result.get('job_list', None)
is_job_detail_found = False
if job_detail is not None:
is_job_detail_found = True
result.update({'is_job_detail_found': is_job_detail_found, 'job_detail': job_detail})
log_writer.log_stop(result)
return render_template("scheduler_manager.html",
context=result)
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
file_name = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
exception_type = e.__repr__()
exception_detail = {'exception_type': exception_type,
'file_name': file_name, 'line_number': exc_tb.tb_lineno,
'detail': sys.exc_info().__str__()}
print(exception_detail)
if log_writer is not None:
log_writer.log_stop({'status': False, 'error_message': str(e)})
log_exception = LogExceptionDetail(log_writer.executed_by, log_writer.execution_id)
log_exception.log(str(e))
return render_template('error.html',
context={'message': None,'status ':False,'message_status': 'info', 'error_message': exception_detail.__str__()})
def scheduler_ajax_index(self):
log_writer = LogRequest(executed_by=None, execution_id=str(uuid.uuid4()))
try:
if 'email_address' not in session:
log_writer.log_start(request)
log_writer.log_stop({'navigating': 'login'})
return redirect(url_for('login'))
log_writer.executed_by = session['email_address']
log_writer.log_start(request)
project_data = Project()
result = project_data.list_project()
project_list = None
if result['status']:
project_list = result.get('project_list', None)
result = {'message': None, 'message_status': 'info', 'status': 'True'}
if project_list is not None:
result.update({'project_list': project_list})
sch = Scheduler(socket_io=None)
job_result = sch.get_all_job()
job_detail = None
if job_result['status']:
job_detail = job_result.get('job_list', None)
is_job_detail_found = False
if job_detail is not None:
is_job_detail_found = True
result.update({'is_job_detail_found': is_job_detail_found, 'job_detail': job_detail})
log_writer.log_stop(result)
return render_template("scheduler_manager_ajax.html",
context=result)
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
file_name = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
exception_type = e.__repr__()
exception_detail = {'exception_type': exception_type,
'file_name': file_name, 'line_number': exc_tb.tb_lineno,
'detail': sys.exc_info().__str__()}
print(exception_detail)
if log_writer is not None:
log_writer.log_stop({'status': False, 'error_message': str(e)})
log_exception = LogExceptionDetail(log_writer.executed_by, log_writer.execution_id)
log_exception.log(str(exception_detail))
return render_template('error.html',
context={'message': None, 'status ': False, 'message_status': 'info',
'error_message': exception_detail.__str__()})
def add_job_at_specific_time(self):
log_writer = None
execution_id = str(uuid.uuid4())
try:
if 'email_address' in session:
log_writer = LogRequest(executed_by=None, execution_id=str(uuid.uuid4()))
log_writer.executed_by = session['email_address']
log_writer.execution_id = execution_id
log_writer.log_start(request)
result = self.registration_obj.validate_access(session['email_address'], operation_type=self.WRITE)
if not result['status']:
log_writer.log_stop(result)
return jsonify(result)
del result
data = json.loads(request.data)
project_id = int(data['project_id'])
job_name = data['job_name']
print(job_name)
date_time = data['date_time']
executed_by = session['email_address']
action_name = data['action_name'].split(',')[:-1]
log_writer = LogRequest(executed_by=executed_by, execution_id=execution_id)
log_writer.log_start(dict(data))
res = self.scheduler.add_job_at_time(date_time=date_time, job_name=job_name, project_id=project_id,
email_address=executed_by, action_name=action_name)
if res:
log_writer.log_stop(
{'status': True, 'message': "Job <{}> created at <{}>".format(job_name, date_time)})
return jsonify({'status': True, 'message': "Job <{}> created at <{}>".format(job_name, date_time)})
else:
return jsonify({'status': False, 'message': 'Failed while creating job'})
else:
return jsonify({'status': True, 'message': "Please login to your account"})
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
file_name = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
exception_type = e.__repr__()
exception_detail = {'exception_type': exception_type,
'file_name': file_name, 'line_number': exc_tb.tb_lineno,
'detail': sys.exc_info().__str__()}
print(exception_detail)
if log_writer is not None:
log_writer.log_stop({'status': False, 'error_message': str(e)})
log_exception = LogExceptionDetail(log_writer.executed_by, log_writer.execution_id)
log_exception.log(str(exception_detail))
return jsonify({'status': False, 'message': 'Error occurred [{}]'.format(str(exception_detail))})
def add_job_within_a_day(self):
log_writer = None
execution_id = str(uuid.uuid4())
try:
if 'email_address' in session:
log_writer = LogRequest(executed_by=None, execution_id=str(uuid.uuid4()))
log_writer.executed_by = session['email_address']
log_writer.execution_id = execution_id
log_writer.log_start(request)
result = self.registration_obj.validate_access(session['email_address'], operation_type=self.WRITE)
if not result['status']:
log_writer.log_stop(result)
return jsonify(result)
del result
data = json.loads(request.data)
project_id = int(data['project_id'])
job_name = data['job_name']
time_type = data['time_type']
time_value = int(data['time_value'])
is_reoccurring = data['is_reoccurring']
executed_by = session['email_address']
action_name = data['action_name'].split(',')[:-1]
log_writer = LogRequest(executed_by=executed_by, execution_id=execution_id)
log_writer.log_start(dict(data))
if is_reoccurring == 'No':
date_time_val = None
if time_type == 'hour':
date_time_val = datetime.now() + timedelta(hours=time_value)
if time_type == 'minute':
date_time_val = datetime.now() + timedelta(minutes=time_value)
if time_type == 'second':
date_time_val = datetime.now() + timedelta(seconds=time_value)
if date_time_val is None:
raise Exception("Date time required!")
date_time_val = str(date_time_val)
res = self.scheduler.add_job_at_time(date_time=date_time_val, job_name=job_name,
project_id=project_id,
email_address=executed_by,
action_name=data['action_name'].split(',')[:-1])
if res:
log_writer.log_stop(
{'status': True, 'message': "Job <{}> created at <{}>".format(job_name, date_time_val)})
return jsonify(
{'status': True, 'message': "Job <{}> created at <{}>".format(job_name, date_time_val)})
else:
return jsonify({'status': False, 'message': 'Failed while creating job'})
else:
res = False
if time_type == 'hour':
res = self.scheduler.add_recurring_job_in_hour(time_value, job_name=job_name,
project_id=project_id,
email_address=executed_by,
action_name=data['action_name'].split(',')[:-1])
if time_type == 'minute':
res = self.scheduler.add_recurring_job_in_minute(time_value, job_name=job_name,
project_id=project_id,
email_address=executed_by,
action_name=data['action_name'].split(',')[
:-1])
if time_type == 'second':
res = self.scheduler.add_recurring_job_in_second(time_value, job_name=job_name,
project_id=project_id,
email_address=executed_by,
action_name=data['action_name'].split(',')[
:-1])
if res:
log_writer.log_stop(
{'status': True,
'message': "Recurring job <{}> created at interval of <{}>".format(job_name, time_value)})
return jsonify(
{'status': True,
'message': "Recurring job <{}> created at interval of <{}>".format(job_name, time_value)})
else:
return jsonify({'status': False, 'message': 'Failed while creating job'})
else:
return jsonify({'status': True, 'message': "Please login to your account"})
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
file_name = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
exception_type = e.__repr__()
exception_detail = {'exception_type': exception_type,
'file_name': file_name, 'line_number': exc_tb.tb_lineno,
'detail': sys.exc_info().__str__()}
print(exception_detail)
if log_writer is not None:
log_writer.log_stop({'status': False, 'error_message': str(exception_detail)})
log_exception = LogExceptionDetail(log_writer.executed_by, log_writer.execution_id)
log_exception.log(str(exception_detail))
return jsonify({'status': False, 'message': 'Error occurred [{}]'.format(str(exception_detail))})
def add_job_in_week_day(self):
    """Flask handler: schedule a job to run on selected week days.

    Expects a JSON request body carrying project_id, job_name,
    week_day_names, is_reoccurring and a comma-separated action_name;
    requires a logged-in session with WRITE access. Responds with a
    JSON {'status': bool, 'message': str} payload.
    """
    print("job_on_week_day")
    log_writer = None
    execution_id = str(uuid.uuid4())
    try:
        # Guard clause: unauthenticated callers get a login prompt.
        if 'email_address' not in session:
            return jsonify({'status': True, 'message': "Please login to your account"})
        caller = session['email_address']
        log_writer = LogRequest(executed_by=None, execution_id=str(uuid.uuid4()))
        log_writer.executed_by = caller
        log_writer.execution_id = execution_id
        log_writer.log_start(request)
        access = self.registration_obj.validate_access(caller, operation_type=self.WRITE)
        if not access['status']:
            log_writer.log_stop(access)
            return jsonify(access)
        del access
        payload = json.loads(request.data)
        project_id = int(payload['project_id'])
        job_name = payload['job_name']
        # Both lists arrive with a trailing separator entry; drop it.
        week_day_names = payload['week_day_names'][:-1]
        is_reoccurring = payload['is_reoccurring']
        # Re-create the request logger under the caller's identity.
        log_writer = LogRequest(executed_by=caller, execution_id=execution_id)
        log_writer.log_start(dict(payload))
        created = self.scheduler.add_recurring_job_weekly_basis(
            is_reoccurring=is_reoccurring,
            days_of_week=week_day_names,
            job_name=job_name,
            project_id=project_id,
            email_address=caller,
            is_record_inserted=False,
            action_name=payload['action_name'].split(',')[:-1],
        )
        if not created:
            return jsonify({'status': False, 'message': 'Failed while creating job'})
        message = "Job <{}> created at for week days <{}>".format(job_name, week_day_names)
        log_writer.log_stop({'status': True, 'message': message})
        return jsonify({'status': True, 'message': message})
    except Exception as e:
        _, _, exc_tb = sys.exc_info()
        exception_detail = {'exception_type': e.__repr__(),
                            'file_name': os.path.split(exc_tb.tb_frame.f_code.co_filename)[1],
                            'line_number': exc_tb.tb_lineno,
                            'detail': sys.exc_info().__str__()}
        print(exception_detail)
        try:
            if log_writer is not None:
                log_writer.log_stop({'status': False, 'error_message': str(exception_detail)})
                log_exception = LogExceptionDetail(log_writer.executed_by, log_writer.execution_id)
                log_exception.log(str(exception_detail))
            return jsonify({'status': False, 'message': 'Error occurred [{}]'.format(str(exception_detail))})
        except Exception as e:
            return jsonify({'status': False, 'message': str(e)})
def remove_existing_job(self):
    """Flask handler: cancel a scheduled job by its id.

    Expects a JSON request body containing 'job_id'; requires a
    logged-in session with WRITE access. Responds with a JSON
    {'status': bool, 'message': str} payload.
    """
    # Bug fix: the original printed "job_on_week_day" — a copy-paste
    # leftover from the week-day handler.
    print("remove_existing_job")
    log_writer = None
    execution_id = str(uuid.uuid4())
    try:
        if 'email_address' in session:
            log_writer = LogRequest(executed_by=None, execution_id=str(uuid.uuid4()))
            log_writer.executed_by = session['email_address']
            log_writer.execution_id = execution_id
            log_writer.log_start(request)
            result = self.registration_obj.validate_access(session['email_address'], operation_type=self.WRITE)
            if not result['status']:
                log_writer.log_stop(result)
                return jsonify(result)
            del result
            data = json.loads(request.data)
            job_id = data['job_id']
            res = self.scheduler.remove_job_by_id(job_id=job_id)
            if res:
                result = {'status': True, 'message': f"Job id:<{job_id}> has been canceled"}
                log_writer.log_stop(result)
                return jsonify(result)
            else:
                # Bug fix: the message previously said 'Failed while creating
                # job', which is wrong for a removal endpoint.
                return jsonify({'status': False, 'message': 'Failed while removing job'})
        else:
            # NOTE(review): 'status': True for an unauthenticated caller mirrors
            # the sibling handlers in this class — confirm False wasn't intended.
            return jsonify({'status': True, 'message': "Please login to your account"})
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        file_name = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        exception_type = e.__repr__()
        exception_detail = {'exception_type': exception_type,
                            'file_name': file_name, 'line_number': exc_tb.tb_lineno,
                            'detail': sys.exc_info().__str__()}
        print(exception_detail)
        if log_writer is not None:
            log_writer.log_stop({'status': False, 'error_message': str(exception_detail)})
            log_exception = LogExceptionDetail(log_writer.executed_by, log_writer.execution_id)
            log_exception.log(str(exception_detail))
        return jsonify({'status': False, 'message': 'Error occurred [{}]'.format(str(exception_detail))})
| 54.389785
| 147
| 0.537439
| 2,100
| 20,233
| 4.845238
| 0.084286
| 0.062801
| 0.035381
| 0.029877
| 0.863096
| 0.843243
| 0.832924
| 0.823194
| 0.817887
| 0.817887
| 0
| 0.001873
| 0.366826
| 20,233
| 372
| 148
| 54.389785
| 0.792366
| 0
| 0
| 0.794203
| 0
| 0
| 0.111594
| 0.002422
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023188
| false
| 0
| 0.055072
| 0
| 0.168116
| 0.026087
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
26c1ee3362b376fac9c53fe5df64896592291929
| 19,899
|
py
|
Python
|
plots_hw2.py
|
restom10/Hw2
|
4070d667f0346728b624eaf20c763953590091d1
|
[
"MIT"
] | null | null | null |
plots_hw2.py
|
restom10/Hw2
|
4070d667f0346728b624eaf20c763953590091d1
|
[
"MIT"
] | null | null | null |
plots_hw2.py
|
restom10/Hw2
|
4070d667f0346728b624eaf20c763953590091d1
|
[
"MIT"
] | null | null | null |
{
"cells": [
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"\n",
"\n",
"u1 = np.genfromtxt('U1.txt',delimiter=';')\n",
"u2 = np.genfromtxt('U2.txt',delimiter=';')\n",
"u3 = np.genfromtxt('U3.txt',delimiter=';')"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"w1=u1[:,0]\n",
"max1=u1[:,1]\n",
"w2=u2[:,0]\n",
"max2=u2[:,1]\n",
"w3=u3[:,0]\n",
"max3=u3[:,1]"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXgAAAEICAYAAABVv+9nAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3Xd8leXZwPHflZ2QPYCQQRLDXgEiokAZjhdx1ypqtVq31db1Vi1aR7WtthW1xfFSrbhq1aoVrWgVEFArEmTJUGZCIIGQHSD7fv94TkIICTk5OTvX9/M5n7OecZ3AuXLnfu77usUYg1JKKf8T4OkAlFJKuYYmeKWU8lOa4JVSyk9pgldKKT+lCV4ppfyUJnillPJTmuCV3xGRDBExIhJke75IRK5s8/4jInJARIpFJF1EakQksItjThGR71wdu1LOJDoOXvkKEdkF9AOa2ry8wBhzS7vtMoCdQLAxprHde+nAd8BAY8x+V8arlKcFeToApbrpHGPMpz3YPx0o1eSuegPtolE+T0QCReRPtm6XHcBZ7d7/TESuFZHTgE+AAbZumQUddOfEi8iLIrJXRMpF5F+216eJSGGbYw6zHbdCRDaKyLlt3pslIptEpFpE9ojI/7rlB6FUO9qCV/7gOuBsYCxwEHi7o42MMZ+KyJnAq8aYVGjtzmnrFaAGGGG7P6X9cUQkGHgf+BtwBjAZeE9Eco0x3wEvABcbY1aISByQ2dMPqJQjtAWvfM2/bK3mltt1wMXAk8aY3caYMuD3jhxYRJKBM4EbjTHlxpgGY8yyDjadCEQCjxpj6o0xS4APgEtt7zcAw0Uk2nacbxyJR6me0gSvfM35xpjYNre/AgOA3W22yXfw2GlAmTGmvIvtBgC7jTHN7c6ZYnt8ITALyBeRZSJysoPxKNUjmuCVPyjCSs4t0h08zm4gXkRiu9huL5AmIm2/P+nAHgBjzCpjzHlAX+BfwJsOxqNUj2iCV/7gTeAXIpJq6/O+x5GDGGOKgEXAMyISJyLBIvKDDjZdCRwC7rJtMw04B/iHiISIyI9FJMYY0wBUAc0dHEMpl9MEr3zN+7YRMC23d4G/Ah8D64BvgHd6cPwrsPrQtwD7gdvab2CMqcdK6GcCB4BngJ8YY7a0OcYuEakCbgR+3IN4lHKYTnRSSik/pS14pZTyU5rglVLKT2mCV0opP6UJXiml/JRbSxUkJiaajIwMd55SKaV83urVqw8YY5K6u59bE3xGRgZ5eXnuPKVSSvk8EXFodrZ20SillJ/SBK+UUn5KE7xSSvkprQevlHKKhoYGCgsLqa2t9XQoPissLIzU1FSCg4OdcjxN8EoppygsLCQqKoqMjAxExNPh+BxjDKWlpRQWFpKZ6Zw1YrSLRinlFLW1tSQkJGhyd5CIkJCQ4NS/gDTBK6WcRpN7zzj759dlgheRMBH5WkTW2RYXfsj2+gIR2Skia223HKdGpjyjeAOsegEObAWtNKqUT7OnBV8HzDDGjAFygJkiMtH23i+NMTm221qXRanc5z+/hn/fAfNy4YmR8N7N8O3bcLDU05Ep1aXAwEBycnJab48++ugx23z22WecffbZACxcuLB1m5KSEk466STGjh3LihUrmDVrFhUVFZ2e67nnnuPll1+2O7bly5czbtw4goKC+Oc//9nNT+aYLi+yGqtgfI3tabDtpk07f2QMFK+HoWdD9qmwfSlsfh/WvAoIJI+GrOlwwnQYOAkCnXOlXylnCQ8PZ+1a+9ua5557Lueeey4AixcvZtSoUTz//PMATJky5bj73njjjd2KLT09nQULFvCnP/2pW/v1hF198CISKCJrsVa4+cQYs9L21m9FZL2IPCEioZ3se72I5IlIXklJiZPCVi5RtRcOlULmVMi9Gma/AnfthGsXw/R7ISQS/jsPXj4PFt3l6WiVsttHH33E0KFDGTduHO+8c2TBrwULFnDLLbewdu
1a7rrrLt577z1ycnI4fPgwGRkZHDhwAICXX36Z0aNHM2bMGK644goAHnzwwdZkvXbtWiZOnMjo0aO54IILKC8/dt32jIwMRo8eTUCA+y592jVM0hjTBOTYFiN+V0RGAr8CioEQYD5wN/CbDvadb3uf3Nxcbfl7s+IN1n3y6COvBQRCaq51m/pLqKuBd2+ATe/BrMfBjf9Zle946P2NbNpb5dRjDh8QzQPnjDjuNocPHyYn58jlwF/96lecd955XHfddSxZsoTs7Gxmz559zH45OTn85je/IS8vj3nz5h313saNG3nkkUf48ssvSUxMpKys7Jj9f/KTn/CXv/yFqVOncv/99/PQQw/x5JNPOvhJnadb305jTAWwFJhpjCkyljrgRWCCKwJUbtSS4Psd50sUGglDz7Ja+vs3uicupezU0kXTcps9ezZbtmwhMzOTQYMGISJcfvnl3TrmkiVLuOiii0hMTAQgPj7+qPcrKyupqKhg6tSpAFx55ZUsX77cOR+oh7pswYtIEtBgjKkQkXDgdOAxEUk2xhSJNa7nfOBbF8eqXK14HcRnQWjU8bfLtP4js2MZ9B/l+riUz+mqpa3cw54WfDKwVETWA6uw+uA/AF4TkQ3ABiAReMR1YSq3KN4A/Ud3vV1MCiQMgp3LXB+TUj00dOhQdu3axfbt2wF4/fXXu7X/jBkzeOuttygttUaSte+iiYmJIS4ujhUrVgDwyiuvtLbmPc2eUTTrgbEdvD7DJREpz6ithPJdMPYK+7bPmgrr/gFNDTqaRnmN9n3wM2fO5NFHH2X+/PmcddZZREREMGXKFKqrq+0+5ogRI7j33nuZOnUqgYGBjB07lgULFhy1zUsvvcSNN97IoUOHyMrK4sUXXzzmOKtWrWq9APv+++/zwAMPsHGja7s5xbhxMktubq7RBT+81K4vYMEs+PE/YdDpXW+/aSG8eQVc/TGkT+x6e+X3Nm/ezLBhwzwdhs/r6OcoIquNMbndPZYOgVCWlgus9vapZ0wGBHZ85qqIlFI9pAleWYrXQ58kiOxn3/YR8ZA8xrrQqpTySprglaV4vXWBtTvFjrKmQuEqqD/ouriUUg7TBK+gsR72b+n+kMfMqdDcAPn/dU1cSqke0QSvoGSLlaiT7Rgi2Vb6yRAYAjs/c0lYSqme0QSv2lxg7WaCD4mA1AnaD6+Ul9IEr6wEHxxhzWLtrqyp1v6Hjq3PoZS7eXO54Llz5zJ8+HBGjx7NqaeeSn5+fjc/XffpmqzKusDab6RVWKy7MqfC0t/CzuUw4nznx6ZUN3hzueCxY8eSl5dHREQEzz77LHfddRdvvPFGt47RXdqC7+2MsZUocLCmTMo4CInSsgXKq3lDueDp06cTEREBwMSJEyksLHT1x9YWfK9XvgvqqhxP8IHBkDFJ++HV0Rbdc+TajrP0HwVnHtvl0pavlAt+4YUXOPPMM7v6xD2mCb6366gGfHdlToXvP4KK3RCb5py4lHJAR100a9eubS0XDHD55Zczf/58u4/pSLngiy66qNPjvfrqq+Tl5bFsmesbRZrge7viDSCB0He448fIslXO27kMxnav1rbyU120tHurTz/9lN/+9rcsW7aM0NAOF8FzKu2D7+2K10PiYAgOd/wYfYdbZQ60m0Z5IW8pF7xmzRpuuOEGFi5cSN++fR35KN2mLfjerniDtYB2T4hA5g+sFrwx3St3oJQTeXO54F/+8pfU1NS0dt+kp6ezcOFCxz6onbRccG92sBT+mAWnPwyTftGzY61+Cd7/BfxsJfQd6pz4lE/RcsHOoeWClXMUr7fue3KBtUXbfnillFfQBN+bOVqioCNxGRA7UPvhlfIimuB7s+L1EJ1q1XZ3hqypsOtzaGp0zvGUUj3SZYIXkTAR+VpE1onIRhF5yPZ6poisFJFtIvKGiIS4PlzlVD2ZwdqRzKlQVwlF65x3TKWUw+xpwdcBM4wxY4AcYKaITAQeA54wxmQD5cA1rgtTOV3DYT
jwvfMTPGj5YKW8RJcJ3lhqbE+DbTcDzAD+aXv9JUArTfmSfZvANDvnAmuLyCSraJn2wyvlFezqgxeRQBFZC+wHPgG2AxXGmJbO1kIgpZN9rxeRPBHJKykpcUbMyhlaRtA4swUPViu+4CvrLwSl3MybywU/99xzjBo1ipycHCZPnsymTZu6+em6z66JTsaYJiBHRGKBdwG7BzobY+YD88EaB+9IkMoFitdDaIw18sWZsqbCV0/D7pWQNc25x1aqC95cLviyyy5r3WfhwoXccccdfPTRR906Rnd1axSNMaYCWAqcDMSKSMsviFRgj5NjU67UcoHV2bNOB54CAUHaTaO8ijeUC46Ojm59fPDgQcQNM767bMGLSBLQYIypEJFw4HSsC6xLgR8B/wCuBN5zZaDKiZqbYN9GGHel848dGgUp43XCUy/32NePsaVsi1OPOTR+KHdPuPu423h7ueCnn36auXPnUl9fz5IlS+z96A6zpwWfDCwVkfXAKuATY8wHwN3AHSKyDUgAXnBdmMqpSrdDwyHnXmBtK3Mq7F0Dhzvvv1TKFVq6aFpus2fPZsuWLa3lgkWEyy/vXsVTR8oFL1++vMNj3XzzzWzfvp3HHnuMRx55xIFP2D1dtuCNMeuBsR28vgOY4IqglIu56gJri6ypsPwPkP8FDD3LNedQXq2rlnZvd8kll3DTTTe5/Dw6k7U3Kt4AgSGQOMQ1x0890Tr+7pWuOb5S3eAt5YK3bt3a+vjf//536wIkrqTlgnuj4vWQNBSCXDT5OCgU4rOsriCl3MibywXPmzePTz/9lODgYOLi4njppZcc/pz20nLBvY0x8MdsGDwTzn/adef5x4+hdBvcrK343kLLBTuHlgtWjqsuhkMHXHeBtUXCCVC2wxqxo5TyCE3wvU1riWAXXWBtkZANTfVQudu151FKdUoTfG/TMoKm30jXnich27rXfvhexZ1dvv7I2T8/TfC9TfF6iMuEsOiut+0JTfC9TlhYGKWlpZrkHWSMobS0lLCwMKcdU0fR9DbOrgHfmT5JEBJlXWhVvUJqaiqFhYVoUUHHhYWFkZqa6rTjaYLvTeqqrQufOZe5/lwi1oVWTfC9RnBwMJmZmZ4OQ7WhXTS9SfG31r0z1mC1R0K2JnilPEgTfG/irhE0LRKyoaIAGuvccz6l1FE0wfcmpVutGvBRye45X0I2YKBsp3vOp5Q6iib43qQ8H+LSnV8DvjMJJ1j32k2jlEdogu9NKvKdv4LT8WiCV8qjNMH3FsZY/eHuTPBhMdCnryZ4pTxEE3xvcfCAtchHnBsTPNhG0uhkJ6U8QRN8b1GRb93Hprv3vAknQJkmeKU8QRN8b1G+y7p3ZxcNWC34mn1QW+Xe8yqlNMH3GhUF1r3bW/C2mjTailfK7bpM8CKSJiJLRWSTiGwUkVttrz8oIntEZK3tNsv14SqHVeRDRCKERrr3vFp0TCmPsacWTSNwpzHmGxGJAlaLyCe2954wxvzJdeEppynPd3/rHSAuAxAdSaOUB3SZ4I0xRUCR7XG1iGwGUlwdmHKyigLXr+LUkeAwiE3TBK+UB3SrD15EMoCxQMtCm7eIyHoR+ZuIxHWyz/UikicieVpG1EOam62Vldx9gbWFFh1TyiPsTvAiEgm8DdxmjKkCngVOAHKwWviPd7SfMWa+MSbXGJOblJTkhJBVt1UXWcvneaKLBo6MhdeFIJRyK7sSvIgEYyX314wx7wAYY/YZY5qMMc3AX4EJrgtT9UjLCBp3T3JqkZANdVVwUP+CU8qd7BlFI8ALwGZjzNw2r7ctSXgB8K3zw1NO0TrJKcMz59eaNEp5hD2jaCYBVwAbRGSt7bU5wKUikgMYYBdwg0siVD1XbkvwMc5bCqxbWodKboOBp3gmBqV6IXtG0XwOdFRf9kPnh6NcoqLAqgEf7LzFfLslJg0CQ7QFr5Sb6UzW3sDdZYLbCwiE+Cyd7KSUm2mC7w08NcmpLa0qqZTbaYL3d02NUL
XHcyNoWiScAGU7oLnJs3Eo1Ytogvd3VYVgmjzbRQNWC76pDioLPRuHUr2IJnh/V+6hOvDttR1Jo5RyC03w/q5lDLynu2jiW8bCaz+8Uu6iCd7fVRSABEK0h8bAt4jsCyFR2oJXyo00wfu78nyISYFAe+a0uZCIdaFVE7xSbqMJ3t95egx8W1pVUim30gTv7yoKvCvBVxRAY52nI1GqV9AE788aaq1SwZ6+wNoiIRswULbT05Eo1Stogvdnlbute08PkWyhVSWVcitN8P6stUywt7TgbQm+TIdKKuUOmuD9WbmXjIFvERYDffpqC14pN9EE788q8q0yvZH9PR3JEVp0TCm30QTvzyoKrFrsAV70z6xj4ZVyGy/65iunK8/3nu6ZFgnZULMPaqs8HYlSfk8TvD+r8II68O21FB3TC61KuZwmeH9VVwOHSr1nBE2LBC06ppS7dJngRSRNRJaKyCYR2Sgit9pejxeRT0Rkq+0+zvXhKrtVFFj33tZFE5cJiPbDK+UG9rTgG4E7jTHDgYnAzSIyHLgHWGyMGQQstj1X3sLbxsC3CA6D2DRN8Eq5QZcJ3hhTZIz5xva4GtgMpADnAS/ZNnsJON9VQSoHlHtpggctOqaUm3SrD15EMoCxwEqgnzGmyPZWMdCvk32uF5E8EckrKSnpQaiqWyoKIDgC+iR6OpJjtYyFN8bTkSjl1+xO8CISCbwN3GaMOWqMmzHGAB1+W40x840xucaY3KSkpB4Fq7qhpUywiKcjOVZCNtRVwcEDno5EKb9mV4IXkWCs5P6aMeYd28v7RCTZ9n4ysN81ISqHlHvhEMkWWnRMKbewZxSNAC8Am40xc9u8tRC40vb4SuA954enHFZR4H0jaFroAtxKuYU967hNAq4ANojIWttrc4BHgTdF5BogH7jYNSGqbjtcDnWV3nmBFazyCYEhmuCVcrEuE7wx5nOgs47cU50bjnKK1hE0XtpFExAI8Vma4JVyMZ3J6o+8dZJTW1pVUimX0wTvj7x1klNbCSdA2Q5obvJ0JEr5LU3w/qg8H0JjIDzW05F0LiEbmuqgstDTkSjltzTB+6OKAojz0v73FvEtQyW3ejYOpfyYJnh/1DLJyZvFZ1r3Fbs9G4dSfkwTvL8xxmrBe3uCj+wHEghVez0diVJ+SxO8vzlYAg2HvHsEDVhDJaP6a4JXyoU0wfubliGS3t6CB4geAFV7PB2FUn5LE7y/Kd9l3Xt7Cx5sCV5b8Eq5iiZ4f9MyBj4mzbNx2CM6xWrBa9lgpVxCE7y/qSiAiEQIjfR0JF2LHgD1NVbpYKWU02mC9zfl+b7RPQNWggftplHKRTTB+5sKL64D3150inWvF1qVcglN8P6kudmaOOQLI2hAW/BKuZgmeH9SXQTNDb7TRRPZHxBN8Eq5iCZ4f1Lh5XXg2wsKgci+2kWjlItogvcnrZOcMjwaRrfoWHilXEYTvD9pXcnJB8bAt4hO0QSvlItogvcnFfkQlQxBoZ6OxH5arkApl+kywYvI30Rkv4h82+a1B0Vkj4istd1muTZMZZdyHygT3F50CtRWQl2NpyNRyu/Y04JfAMzs4PUnjDE5ttuHzg1LOaSiwHdG0LRoGQtfXeTZOJTyQ10meGPMcqDMDbGonmhqgKpC3xlB06J1LLx20yjlbD3pg79FRNbbunDiOttIRK4XkTwRySspKenB6dRxVRaCafbBLhqd7KSUqzia4J8FTgBygCLg8c42NMbMN8bkGmNyk5KSHDyd6lLLEElf66KJSrbutQWvlNM5lOCNMfuMMU3GmGbgr8AE54aluq18p3Xvay344DCr+mWlJnilnM2hBC8iyW2eXgB829m2yk1Kt0NgqG/UgW9PJzsp5RJBXW0gIq8D04BEESkEHgCmiUgOYIBdwA0ujFHZo3Q7xGdBgA9ObYhOsa4hKKWcqssEb4y5tIOXX3BBLKonSrdB4iBPR+GY6AGwe6Wno1DK7/hgc08do7nJ6oNPOMHTkT
gmegAcLoOGw56ORCm/ogneH1TuhqZ6SMj2dCSOaV34Q/vhlXImTfD+oHSbde+zCV7HwivlCprg/UHpduveZxO8tuCVcgVN8P6gdDuEREEfH51IFq2TnZRyBU3w/qB0m3WBVcTTkTgmpA+ExWoLXikn0wTvD0q3+W73TAtd+EMpp9ME7+sa66xRND6f4HXhD6WcTRO8ryvfZVWR9NUx8C20XIFSTqcJ3te1DpH09QSfAgf3Q2O9pyNRym9ogvd1LQk+3scTfIyu7KSUs2mC93Wl26xyu+Gxno6kZ3RlJ6WcThO8ryvd4fsXWEEnOynlAprgfZ0/DJEEbcEr5QKa4H1ZXTXUFPv+BVaA0CgIjdYWvFJOpAnel5XtsO79IcGDjoVXysk0wfsyX68i2Z6OhVfKqTTB+7KWKpLxWZ6Nw1k0wSvlVJrgfVnpNohOheBwT0fiHNEpUF0MTQ2ejkQpv9BlgheRv4nIfhH5ts1r8SLyiYhstd3HuTZM1aHS7f7T/w62kTQGavZ5OhKl/II9LfgFwMx2r90DLDbGDAIW254rdzIGSrf6T/876Fh4pZysywRvjFkOlLV7+TzgJdvjl4DznRyX6sqhMqit9LMEr2PhlXKmIAf362eMaSkaUgz062xDEbkeuB4gPT3dwdOpY9hG0Jj4LMoP1lNYfojKww0MT44mITLUw8E5SNdmVcqpHE3wrYwxRkTMcd6fD8wHyM3N7XQ71TljDJuKqth54CCF5YcpLD9Exu5PuBY489UitjR8ctT2mYl9GD8wjvED48gdGMcJSZEEBBx/tafGpmb2VddRVHGYpKhQBib0ceEn6kRYLARHaIJXykkcTfD7RCTZGFMkIsnAfmcGpSzGGBZv3s+8pdtYu7ui9fWY8GDmhObTRCCTTxzPxfFRpMaFExkaxPo9leTtKmfJlv38c3Vh6/bj0mMZPzCOjMQ+7KuyEnlRZS17Kw9TVFHL/upamm2/fgMDhGsmZ3LbaYOICOlxG8B+IjrZSSkncvTbuxC4EnjUdv+e0yJSNDUbPvq2mHlLt7G5qIrUuHAePm8EuRnxpMSFEx0WDG++Cvsyue/c0Ufte0p2Iky1fjnsPHCQ1fnlrbel35W0bhcaFEBKbDjJsWFMHpTIgJgwkmPD6R8Txn82FjN/+Q7+vb6Ih88fwYyhnfbAOZ+OhVfKabpM8CLyOjANSBSRQuABrMT+pohcA+QDF7syyN6ioamZhWv38vRn29hRcpCspD48ftEYzs0ZQHBgu+vhpTuOWwNeRMhKiiQrKZKLctMAqDhUz96KWvrHhBEXEYx0skj39CF9+eG4VOa8s4GrF+Qxa1R/HjhnBP2iw5z2WTsVnQI7V7j+PEr1Al0meGPMpZ28daqTY+m16hqb+OfqQp5btp3dZYcZ2j+KeZeN5cyRyQR21Hfe3Axl2yHzB906T2xECLERIXZte2JGPP/+xRT+umIHf168leXfH+CumUP48UkDO47JWaIHWIt+NDdBQKDrzqNUL+DGDlbVljGGNbsreH/dXt5fV8SBmjrGpMXywNkjOHVY305b14CVABsOuXySU0hQADdPz+bs0cnc969vuf+9jby9upDf/XAUIwbEuOak0QPANEHNfohOds05lOolNMG7kTGGLcXVVlJfv5fdZYcJCQpg+pAkLp84kMnZicdP7C3cvA7rwIQ+vHz1BBau28vDH2zi3HlfcMfpg7l5ugvG4Led7KQJXqke0QTvBrsOHOT9dXtZuG4vW/fXEBggTMpO5NZTB3PGiH7WRdPuKLMVGXPjJCcR4bycFKYN7st9733LHz/+DsD5Sb41we8Bxjv32Er1MprgXeBwfRNf7yrji20H+HzrATYVVQFwYkYcD583gjNHJZPYk8lIpdshKByiBjgpYvvFRATz5OwcAgT++PF3hAUHcs3kTOedQMsVKOU0muCdoLGpmfV7Kvli6wE+33aANQUV1Dc1ExwojEuPY86soZw9egADYp1U9bF0m9
U9E+CZYqCBAcLjF42hvrGZhz/YRGhQAJdPHOicg0fEQ2CojoVXygk0wffA51sPsODLXazcUUp1XSMAIwZEc9WkDCZlJ3JiRpxrJgqVboO+w51/3G4ICgzgqUvGUv/qau7717eEBAVwsW04Zo+0TnbSFrxSPaUJ3kErd5Ry9YJVJESGcPaYAUzOTuTkExKI72PfMESHNTVC+S4Ydq5rz2OHkKAAnv7xOK57OY+7315PaFAA5+Wk9PzA0Sma4JVyAk3wDti6r5rrXs4jLT6ct286xe6x5U5RkQ/NjV5TRTIsOJD5V+Ry1Ytfc8eb6wgNCmDmyB6OfokeALtXOidApXoxXdGpm/ZV1XLVi6sIDQ5kwU8nuDe5w5Fl+rwkwQOEhwTyt6tOZExqDD9/fQ1LtvRwwY7WyU7NzglQqV5KE3w31NQ18tMXV1F+qJ4XrzqRtPgI9wfROkTSu1Zy6hMaxIKrJzC0fzQ3vvoNn2894PjBolOgqR4OlTovQKV6IU3wdmpoauamV1fz3b5qnvnxOEamuGgmZ1dKt0FYDEQkeOb8xxEdFszLV08gK7EP1768itX57deJsfdAuvCHUs6gCd4OxhjmvLOBFVsP8PsLRjFtSF/PBVO6zeqesWfGqwfE9Qnh1WtPol90GL94fS1VtQ4soK0LfyjlFJrg7fDkp1t5a3Uht546iItPdMJQwJ4o3X7cKpLeIDEylCdn51BcVcuD723s/gGOms2qlHKUJvguvLGqgKcWb+Wi8ancdtogzwbTcBgqC73qAmtnxqbHccv0bN5Zs4d/ry/qeoe2+iRBQJAmeKV6SBP8cSz9bj9z3v2WHwxO4nc/HGVfITBXKtsJGK+7wNqZW2ZkMyY1hnv/tYF9VbX27xgQYJVh0C4apXpEE3wnvt1Tyc2vfcPQ/lE88+Nxxy644QmtVSS9vwUPEBwYwBOzc6htaOJ/31qHMd1YkldnsyrVY16QtbxPQ1Mzd765juiwYF686kQiQ71kPpibywQ7Q1ZSJPeeNZwVWw/w8n/z7d9R12ZVqsc0wXfghc938t2+ah4+fyR93bFMnb3KtkNkPwiN8nQk3XL5SelMG5LE7z7czLb9Nfbt1NKC706rXyl1FE3w7ewuO8STn36wNITkAAAURklEQVTP6cP7cfpwNy42bY/S7Z12zxhjaGh2YEiiG4gIf7hwNBEhgdz+xlrqG+2YoRqdAo21cLjc9QEq5ad61PcgIruAaqAJaDTG5DojKE8xxvDgwo0EiPDguSM8HQ5gxfR9+fes2b+G0tpdVMVlULniHqrqqqisr6Sqroqq+iqq6qpoNI3Eh8WTEplCcp9k6z4y+ajnEcER1DfVU1VfRXV99ZFbw5HHsaGxzEibQWxYrNM+R9/oMH7/w1Hc+Oo3/GXJVu48Y8jxd2g72Ski3mlxKNWbOKNzeboxpgfz0r3HxxuLWbxlP/fOGkaKs2q3O6D4YDH/3ftfvir6iq+KvqKs1jYjtE8wUY0lRO9fS0xoDNEh0ST3SSY6JJqY0BhCAkPYd3Afe2v28n3593y2+zPqm+uPOnZwQLBdLf2H5WFOSTmFMzPPZEbaDCKCe16WYebIZH40PpWnl25j2pAkxg88TuJuu/BH/1E9PrdSvZGXXD30vJq6Rh5cuIlhydH8dFKGW89dXV/NquJVrUl9V9UuAOLD4pmYPJGTB5zMBCLo9+pFBM5+DYadbddxm00zZbVl7KnZw96aveyt2UtlfSXRIdFEBUcRGRJJVEiU9TwkiqiQKCKDI8mvymfRzkUs2rWI5YXLCQsMY2raVM7MOJPJqZMJDXR8NaoHzhnOVztKuf2NdSy6dQp9OruAHaOTnZTqqZ4meAP8R0QM8H/GmPntNxCR64HrAdLT03t4OteZ+5/v2VddyzOXjyPIhUMiK+sq2Vy2mc2l1m1T2Sbyq6zRJeFB4YzvN54fDf4RJw84mUGxg46MvV//lnXfjSGSARJAYngiieGJjEkaY/d+wxKGMS
xhGLeNv421+9fy4c4P+ST/Ez7e9TGRwZGcmn4qs4fMZlRS91vWUWHBzL04h9nz/8vDH2zi0QtHd7xhZD+QQB0qqVQP9DTBTzbG7BGRvsAnIrLFGLO87Qa2pD8fIDc31yuHRHy7p5IFX+7ksgnpjEuPc8oxG5ob2FO9h/yqfL4v/55NpZvYXLaZPTVHWqTJfZIZFj+Ms7POZny/8YxJGkNIYCflh0u3AQJxGU6Jzx4BEsC4fuMY128c90y4h5VFK1m0cxGLCxbz3vb3ODvrbG4ddyv9+/Tv1nEnZMZzww9O4Lll2zlrdDJTBiV1cPJAiOqvCV6pHuhRgjfG7LHd7xeRd4EJwPLj7+VdmpoN9767gfg+Idw1c2i39m02zeyt2UtBVQH51fnkVx257a3ZS5Npat02PSqdkYkjuWjwRVYLOX4YcWHd+GVSth1i0yDYM8M2gwKCmJQyiUkpk5jTMIfnNzzPSxtf4tP8T7l65NVcNfIqwoPsv25x++mDWLh2D39ZvK3jBA86Fl6pHnI4wYtIHyDAGFNte3wG8BunReYmr63MZ11hJU9dkkNMeLDd+60rWcdD/32IreVbW18LDwpnYPRAhicMZ2bGTAZGD2Rg9ECyYrOIDonuWaAtVSS9QERwBL8Y9wsuHHwhc/Pm8sy6Z3h769vcPv52ZmXOsqukQ2hQINdOyeI3H2xi1a4yTszo4IJr9ADYv9kFn0Cp3qEnLfh+wLu2L3MQ8HdjzEdOicpN9lfV8sePvmNydiLnjhlg1z419TU89c1TvPHdG/SN6Muck+aQHZtNRnQGieGJrqlXY4w1Bn7MJc4/dg+kRKbw+LTHySvO4w+r/sA9K+7h9S2vc/eJd9vVP3/JhDT+smQrzyzdxos/nXDsBtEpsPVT6/N7ug6QUj7I4QRvjNkB2H/lzgv95oNN1DU18/D5I+1KzIsLFvO7lb+j5FAJlw27jJ+P/Tl9gvs4P7CGw1BdbN1qiqE8H+qqvLZMcG7/XF4/63UWbl/In9f8mcs+vIxzss7hjtw7SAxP7HS/iJAgrp6UyeOffM+mvVUMH9Dur5zoAdBw0PrsYR5aYEUpH9Zrh0ku+76ED9YXcftpg8lMPH6SLj5YzO9X/p4lu5cwJG4IT0570qERJEdpboKSLbD7a9izGip325J6EdRWHrt9SCSkT+zZOV0oMCCQCwZdwBkZZ/D8hud5eePLrCxaydzpc487gucnJ2fw3LLtPLtsO3+5dOzRb7ZMdqrcowleKQdItyr89VBubq7Jy8tz2/k6U9vQxBlPLCcoQFh02xRCgwI73K6puYk3v3+Tp755iqbmJm7KuYkrhl9BcID9ffWtDpdDYZ6V0Au/hsLVUF9tvRcebxUQi+oPUcnWEMGoZNtz22vhcT7VTfFd2XfctvQ2ig8VM+ekOVw0+KJOt/39h5v564odLLlzGhltf9mW7YTvFsHICyHKy8pGKOVGIrLakUoBvTLBP7poC88t287frz2JU7I77kLYWbmT+764j/Ul6zllwCncN/E+0qJsqzkZY7W2y3fBwRJoOAT1NVB/yPb4oO3+kJXES76DA99b+0oA9B0BaSdC6gRImwDxWT6VvO1VWVfJ3cvv5ou9X3DhoAuZc9KcDoeB7q+qZfIflnLhuBR+/8NOxsUr1Ys5muB7XRfN2t0VzF++nUtOTOs0ua/Zv4ZbFt9CgDH8PvsyzgqMR754xmpRlu+ybo2HOz9JUBgER0BIH+s+LhNGX2wl9JRxPlcN0lExoTE8ferTzFs7j+c3PM/Wiq08Me0J+kYcvaZt3+gwLhqfylt5hdx22mD6eVMFT6V8WK9qwdc2NHHOXz6npq6Rj2//AdFhtq6WhlrYvxGK1rEkfzF3Va+nf2MjzxXvI7XRNpY9KBziM62JRnGZtseZVtdBcITVRx4SYT0O6LjLpzf7z67/cN8X9xERFMHcaXMZ12/cUe8XlB5i2p+Wcs3kTO49a7iHolTKO2kL3g5/XryVov
37eeWscKLXPg9F66FonXWx0zTxZlQkv02IYwShzEv5H+Jzx1ndJ3EZtqnz/teN4i5nZJxBVkwWty69lWs+voa7J9zN7CGzW0cvpSdEcM6YAby2soCbp2cTG9HJjF6llN38uwVfUwLF66BoHRU7VlOxPY+MgH1H3u/TF5LHYPqP5hlzgOf2LmVKyhT+NPVPTqmeqI5VVV/FPcvvYcWeFZyffT6/nvjr1n75LcVVzHxyBbefNphbPb3AuVJepPe14I2BQ2XWsMLqYqjee2SYYWUhFG+wHtscln7sDMyk/+SrCUsbB8mjIao/jc2NPPLVI7y9dSnnZ5/P/Sff79goGWWX6JBo5p06j2fWPsP/rf8/6pvqeXTKo4gIQ/tHc9qwvrz45U6unZLZeaVJpZRdvPcb1FhnJeqKgmNvLcm8qf7Y/cLjrfHTGVMgeQwkj+bPm8KYu6KEF686kbChRy7wHW48zC+X/ZJlhcu4btR1/Hzsz10zE1UdJUACuGXsLYQFhfHUN08xMHogP8v5GQA3Tcvmwme/5PWvC7h2SpaHI1XKt3kuwTc3Wy3sllEp5bugIt+atVlRYGt9t+k+kkBr6npsGqRNtMaHRw84Mk68Zfx4u2Jc6wsreOqLL/nR+FSmt0nu5bXl3LLkFjaUbODek+7lkqHeVQagN7hm5DXsqtzFs+ueJT063aqqOTCOkzLjeX7FTq44eWCncxSUUl1zb4KvLIRXf2RL5gXQVHfkPQmwEnhcBpwwHWLTj75FDYDA7oVb19jEL99aT2JkCL8++8jIjMLqQm769Cb21uxl7rS5nDbwNOd8PtUtIsIDJz/A3oN7uf+L+xnQZwDj+o3j5unZ/ORvX/OvNXuYfaL3riGglLdzb4I/XAYH90PfYTDkTNuQQ9stJg2CnDtyYt6SbXy3r5q/XZXbWily9b7V3L70dhpNI/PPmM/4fuOdek7VPcGBwTwx7Qku//Bybl16K6/Neo0pg9IYmRLNc8t28KPxaQQGaLeZUo5w3dJFHek/Gm5YDrNfgTMehhOvgexTrWn6Tk7u3+6p5JnPtnPhuFRmDLWmub+79V2u/c+1xITG8PdZf9fk7iVaJkQB3Lz4Zqrqq/jZtGx2HjjIom+LuthbKdUZ9yZ4N6lvbOZ/31pHQp8Q7j97OE3NTTye9zj3f3k/uf1yeXXWq2TEZHg6TNVGenQ6T05/kj01e7jjszuYMTSBrMQ+PLN0O+4cyquUP/HLBD9vyVa2FFfzuwtGERhUx61Lb2XBxgVcMuQSnjntGWJCtTKhNxrfbzwPnfIQXxd/zW+/fpgbpmaxqaiKr3eWeTo0pXyS9w6T7AZjDN/tq+bzrQf4fNsBVmw9wA/HpjAsvZErFl3BzsqdOlLGR5xzwjkUVBfw3LrnSM1J5+/XXcCEzA5We1JKdclnE3xR5eHWhP7FtlIO1Fgjck5I6sNPTh7IjJyDXPrBpTSaRp497VlOHnCyhyNW9vrZmJ+RX5XPvLV/5vGpAxE5w9MhKeWT3J7gjTHUNTZT29DEofomDjc0cbjdfW1DE3UNzdQ2Wq/V2h7XNjRRU9vI6oJydpQcBCAxMpTJ2QlMyk5kUnYisX0Mi3Yu4hefPUJKZArzZszT/nYfIyI8POlhimqKmPP5HFKiUhiRMMLTYSnlc9xaiyZ8wGCTfOUTNDtwyuBAISw4kPDgQIYMCGRIaj3JiQdpDCihoLqAwupCCqoLOHD4AAAnJZ/E41Mf1/52H1ZWW8a8NfO4M/dO1yyNqJSP8MiCHyIyE3gKCASeN8Y8erztY7P6mxmPXoIENCHSBGLdG5owNGJootl232QaaTaNNBnrcWNzI42mkfqmeg63q8XeN7wvadFppEWlkR6VTmZMJlPTpmpNGaWUX3B7sTERCQSeBk4HCoFVIrLQGLOps32aOEglGwiWYIICgggOCCY4IJiQAOt5UEAIwa2Pg1q3aX0uQQQHBtMvoh9pUVZCT41KJTwo3NGPoZRSfqsnff
ATgG3GmB0AIvIP4Dyg0wQ/JH4Iiy9e3INTKqWUsldPxsGnALvbPC+0vXYUEbleRPJEJK+kpKQHp1NKKdUdLp/oZIyZb4zJNcbkJiUlufp0SimlbHqS4PcAaW2ep9peU0op5QV6kuBXAYNEJFNEQoBLgIXOCUsppVRPOXyR1RjTKCK3AB9jDZP8mzFmo9MiU0op1SM9mslqjPkQ+NBJsSillHIiv6wmqZRSShO8Ukr5LU3wSinlpzTBK6WUn3JrNUkRKQHyXXDoROCAC47rShqz6/lavOB7MftavOCbMQ8xxkR1dye31oM3xrhkKquI5DlSac2TNGbX87V4wfdi9rV4wXdjdmQ/7aJRSik/pQleKaX8lL8k+PmeDsABGrPr+Vq84Hsx+1q80ItidutFVqWUUu7jLy14pZRS7WiCV0opP+UzCV5E0kRkqYhsEpGNInJrJ9tNE5G1tm2WuTvOdrF0GbOIxIjI+yKyzrbNTz0Rqy2WMBH5uk0sD3WwTaiIvCEi20RkpYhkuD/So+KxJ+Y7bP8G60VksYgM9ESsbeLpMuY2214oIkZEPDasz954ReTiNv/X/+7uONvFYs//i3Tb93ON7f/GLE/E2i6mQFs8H3TwXve/e8YYn7gBycA42+Mo4HtgeLttYrHWhE23Pe/rAzHPAR6zPU4CyoAQD8UrQKTtcTCwEpjYbpufAc/ZHl8CvOHhn7E9MU8HImyPb/KFmNv8n1kOfAXkenO8wCBgDRBne+7p7549Mc8HbrI9Hg7s8mTMtjjuAP4OfNDBe93+7vlMC94YU2SM+cb2uBrYzLFrwF4GvGOMKbBtt9+9UR7NzpgNECUiAkRiJfhGtwbaEoilxvY02HZrfxX+POAl2+N/AqfaYvcIe2I2xiw1xhyyPf0Ka/Uxj7Hz5wzwMPAYUOuu2DpiZ7zXAU8bY8pt+3j6u2dPzAaItj2OAfa6KbwOiUgqcBbwfCebdPu75zMJvi3bnyZjsX4rtzUYiBORz0RktYj8xN2xdeY4Mc8DhmH959oA3GqMaXZrcG3Y/kRcC+wHPjHGtI+3dbF1Y0wjUAkkuDfKo9kRc1vXAIvcE1nnuopZRMYBacaYf3skwHbs+BkPBgaLyBci8pWIzHR/lEezI+YHgctFpBBrXYufuznE9p4E7gI6+/53+7vncwleRCKBt4HbjDFV7d4OAsZj/Rb8H+DXIjLYzSEeo4uY/wdYCwwAcoB5IhKNhxhjmowxOVit3AkiMtJTsdjL3phF5HIgF/ijO+PryPFiFpEAYC5wp6fia8+On3EQVjfNNOBS4K8iEuveKI9mR8yXAguMManALOAV28/e7UTkbGC/MWa1M4/rUwleRIKxEuVrxph3OtikEPjYGHPQGHMAq/9yjDtjbM+OmH+K1a1kjDHbgJ3AUHfG2BFjTAWwFGjfEmtdbF1EgrD+tC11b3QdO07MiMhpwL3AucaYOnfH1plOYo4CRgKficguYCKw0JMXWlsc52dcCCw0xjQYY3ZiXW8a5O74OnKcmK8B3rRt818gDKsQmSdMAs61/Xv/A5ghIq+226bb3z2fSfC2vqYXgM3GmLmdbPYeMFlEgkQkAjgJq9/bI+yMuQA41bZ9P2AIsMM9ER5NRJJaWl0iEg6cDmxpt9lC4Erb4x8BS4ztqo8n2BOziIwF/g8ruXu0b9gWz3FjNsZUGmMSjTEZxpgMrOsG5xpjHCo45ep4bf6F1XpHRBKxumw88v/YFoM9Mbf97g3DSvAl7oyzhTHmV8aYVNu/9yVY36vL223W7e+eW6tJ9tAk4Apgg61fDawRKOkAxpjnjDGbReQjYD1WP9bzxphvPRKtpcuYsS6kLRCRDVhX/u+2/fXhCcnASyISiPXL/01jzAci8hsgzxizEOsX1isisg3rgvAlHoq1hT0x/xHrAvZbtmtSBcaYcz0WsX0xexN74v0YOENENgFNwC+NMZ78y86emO/E6kq6HeuC61WebKx0pKffPS1VoJRSfs
pnumiUUkp1jyZ4pZTyU5rglVLKT2mCV0opP6UJXiml/JQmeKWU8lOa4JVSyk/9P9fVjkLJXcpBAAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"plt.figure()\n",
"plt.xlim(2.5,4)\n",
"plt.title('Edificios')\n",
"plt.plot(w1,max1,label='Edificio 1')\n",
"plt.plot(w2,max2,label='Edificio 2')\n",
"plt.plot(w3,max3,label='Edificio 3')\n",
"plt.legend()\n",
"plt.show()\n",
"plt.savefig('Graficas de edificios')\n",
"plt.close()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| 213.967742
| 18,100
| 0.923815
| 761
| 19,899
| 24.13272
| 0.8318
| 0.00196
| 0.002614
| 0.004574
| 0.005663
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153593
| 0.023016
| 19,899
| 92
| 18,101
| 216.293478
| 0.79106
| 0
| 0
| 0.217391
| 0
| 0.01087
| 0.961405
| 0.921353
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.021739
| 0
| 0.021739
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
26c4407547236f30dc8ad32e2d9242ea29893f06
| 2,357
|
py
|
Python
|
golf/migrations/0004_auto_20170720_1211.py
|
kenrumer/scorekeeper
|
c7f22676e84dfdf6ca3361c6ff56719f68fce31f
|
[
"MIT"
] | null | null | null |
golf/migrations/0004_auto_20170720_1211.py
|
kenrumer/scorekeeper
|
c7f22676e84dfdf6ca3361c6ff56719f68fce31f
|
[
"MIT"
] | null | null | null |
golf/migrations/0004_auto_20170720_1211.py
|
kenrumer/scorekeeper
|
c7f22676e84dfdf6ca3361c6ff56719f68fce31f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-20 19:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 1.11.3 on 2017-07-20 (see file header).
    # Adds UI-selection fields ('default', 'priority') to the course,
    # coursetee, format and player models, and re-declares the
    # Tee.course_tee foreign key with an explicit verbose name.

    dependencies = [
        ('golf', '0003_auto_20170719_1442'),
    ]

    operations = [
        # 'default' flags the record to preselect for faster score entry;
        # 'priority' orders records in selection lists (highest first,
        # -1 meaning unset), per the help_text below.
        migrations.AddField(
            model_name='course',
            name='default',
            field=models.BooleanField(default=False, help_text='Set a default for faster starts to putting scores in', verbose_name='Default'),
        ),
        migrations.AddField(
            model_name='course',
            name='priority',
            field=models.IntegerField(default=-1, help_text='Highest priority will be listed first in selecting format', verbose_name='Priority'),
        ),
        migrations.AddField(
            model_name='coursetee',
            name='default',
            field=models.BooleanField(default=False, help_text='Set a default for faster starts to putting scores in', verbose_name='Default'),
        ),
        migrations.AddField(
            model_name='coursetee',
            name='priority',
            field=models.IntegerField(default=-1, help_text='Highest priority will be listed first in selecting format', verbose_name='Priority'),
        ),
        migrations.AddField(
            model_name='format',
            name='default',
            field=models.BooleanField(default=False, help_text='Set a default for faster starts to putting scores in', verbose_name='Default'),
        ),
        migrations.AddField(
            model_name='format',
            name='priority',
            field=models.IntegerField(default=-1, help_text='Highest priority will be listed first in selecting format', verbose_name='Priority'),
        ),
        migrations.AddField(
            model_name='player',
            name='priority',
            field=models.IntegerField(default=-1, help_text='Highest priority will be listed first in selecting format', verbose_name='Priority'),
        ),
        # AlterField only attaches the verbose name; on_delete=CASCADE is
        # restated because Django 1.11 requires it explicitly.
        migrations.AlterField(
            model_name='tee',
            name='course_tee',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='golf.CourseTee', verbose_name='Course Tee Id'),
        ),
    ]
| 41.350877
| 147
| 0.61434
| 253
| 2,357
| 5.592885
| 0.272727
| 0.050883
| 0.113781
| 0.133569
| 0.75053
| 0.75053
| 0.701767
| 0.701767
| 0.701767
| 0.701767
| 0
| 0.021726
| 0.277471
| 2,357
| 56
| 148
| 42.089286
| 0.80916
| 0.02885
| 0
| 0.714286
| 1
| 0
| 0.2713
| 0.010314
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.061224
| 0
| 0.122449
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
26df09d2ab4ca682d717878fe6737276bd55d62c
| 10,496
|
py
|
Python
|
hsstock/app/collect/futu/int_mysqlschema_kline_history_1M_app.py
|
hsstock/hsstock
|
f8841331022e8844537a5c5b08d047e2cc328856
|
[
"Apache-2.0"
] | 2
|
2018-10-04T08:04:24.000Z
|
2021-01-21T06:58:30.000Z
|
hsstock/app/collect/futu/int_mysqlschema_kline_history_1M_app.py
|
hsstock/hsstock
|
f8841331022e8844537a5c5b08d047e2cc328856
|
[
"Apache-2.0"
] | null | null | null |
hsstock/app/collect/futu/int_mysqlschema_kline_history_1M_app.py
|
hsstock/hsstock
|
f8841331022e8844537a5c5b08d047e2cc328856
|
[
"Apache-2.0"
] | 1
|
2018-10-20T09:39:50.000Z
|
2018-10-20T09:39:50.000Z
|
# -*- coding: UTF-8 -*-
import logging
import sqlalchemy as sa
import pandas as pd
from hsstock.service.mysql_service import MysqlService
from hsstock.utils.app_logging import setup_logging
def main():
    """Legacy entry point for creating the sharded 1-minute kline tables.

    Original docstring was "应用分表" ("apply table sharding"). The actual
    schema-creation logic that used to live here survived only as a large
    commented-out block duplicating main2(); that dead code has been
    removed — see main2() for the live implementation.

    Side effect: instantiates the two MySQL services, presumably to verify
    connectivity/configuration — TODO confirm this is still needed, as the
    returned handles are otherwise unused here.
    """
    storeservice = MysqlService()    # default connection
    storeservice2 = MysqlService(2)  # secondary connection (config slot 2)
def main2():
    """Create the 1-minute kline table schemas (one table per commodity).

    First creates the MyISAM sub tables ft_1M_217 .. ft_1M_266, then the
    ft_1m MRG_MyISAM merge table whose UNION spans ft_1M_1 .. ft_1M_266.
    :return: None
    """
    storeservice = MysqlService(2)
    # The total number of history tables is 266, but last table is 230
    kline_1m_tables_number = 266 + 1

    # Column types shared by every kline table (sub tables and merge table).
    kline_dtype = {
        "id": sa.types.BIGINT,
        "code": sa.types.NVARCHAR(20),
        "time_key": sa.types.DATETIME,
        "open": sa.types.FLOAT,
        "close": sa.types.FLOAT,
        "high": sa.types.FLOAT,
        "low": sa.types.FLOAT,
        "pe_ratio": sa.types.FLOAT,
        "turnover_rate": sa.types.FLOAT,
        "volume": sa.types.BIGINT,
        "turnover": sa.types.FLOAT,
        "change_rate": sa.types.FLOAT,
        "last_close": sa.types.FLOAT
    }
    # ALTER clauses shared by every kline table; '{0}' is replaced with the
    # concrete table name just before execution.
    common_clauses = [
        'ALTER TABLE `{0}` ADD PRIMARY KEY (`id`);',
        'ALTER TABLE `{0}` ADD UNIQUE INDEX (`code`,`time_key`);',
        'ALTER TABLE `{0}` MODIFY COLUMN id BIGINT NOT NULL AUTO_INCREMENT COMMENT \'id\'',
        'ALTER TABLE `{0}` MODIFY COLUMN pe_ratio FLOAT COMMENT \'市盈率\';',
        'ALTER TABLE `{0}` MODIFY COLUMN turnover_rate FLOAT COMMENT \'换手率\';',
        'ALTER TABLE `{0}` MODIFY COLUMN volume BIGINT COMMENT \'成交量\';',
        'ALTER TABLE `{0}` MODIFY COLUMN turnover FLOAT COMMENT \'成交额\';',
        'ALTER TABLE `{0}` MODIFY COLUMN change_rate FLOAT COMMENT \'涨跌幅\';',
        'ALTER TABLE `{0}` MODIFY COLUMN last_close FLOAT COMMENT \'昨收价\';',
    ]

    def _create_table(table, dtype, clauses):
        """Create one table: empty frame + per-table formatted ALTER clauses."""
        df = pd.DataFrame(None, columns=dtype.keys())
        logging.info(table)
        logging.info('table:{0}'.format(table))
        storeservice.init_schema(
            table, df, dtype, [clause.format(table) for clause in clauses])

    try:
        logging.info("create sub kline 1m schema, starting")
        for index in range(217, kline_1m_tables_number, 1):
            _create_table('ft_1M_{0}'.format(index), kline_dtype,
                          common_clauses + ['ALTER TABLE `{0}` ENGINE=MyISAM;'])
        logging.info("create sub kline 1m, end")
    except IOError as err:
        logging.error("OS|error: {0}".format(err))
    else:
        logging.info('create sub kline 1m success')

    union_table = ['ft_1M_{0}'.format(table) for table in range(1, kline_1m_tables_number, 1)]
    # Keep '{0}' as a literal placeholder (escaped as '{{0}}') for the merge
    # table name; only the UNION list is substituted here.  The original code
    # passed the set literal {0} to format(), which produced the same string
    # only by accident (str({0}) == '{0}').
    mrg_kline_claus = ('ALTER TABLE `{{0}}` ENGINE = MRG_MyISAM UNION = ({0}) '
                      'INSERT_METHOD = LAST;').format(','.join(union_table))
    try:
        logging.info("create kline 1m schema, starting")
        _create_table('ft_1m', kline_dtype, common_clauses + [mrg_kline_claus])
        logging.info("create kline 1m, end")
    except IOError as err:
        logging.error("OS|error: {0}".format(err))
    else:
        logging.info('create kline 1m success')
# Script entry point: configure logging, then build the kline table schemas.
if __name__ == "__main__":
    setup_logging()
    main2()
| 10,496
| 10,496
| 0.504668
| 1,120
| 10,496
| 4.636607
| 0.108929
| 0.070094
| 0.084729
| 0.091662
| 0.94897
| 0.94897
| 0.94897
| 0.94897
| 0.94897
| 0.94897
| 0
| 0.018532
| 0.352229
| 10,496
| 1
| 10,496
| 10,496
| 0.745257
| 0.991806
| 0
| 0.66087
| 0
| 0
| 0.287426
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017391
| false
| 0
| 0.043478
| 0
| 0.06087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
f832d87421aedf7d760c9765913be7ac67d93a04
| 51
|
py
|
Python
|
app/instance/config.py
|
batyrotich/News-Api
|
50e4483afe669ff2678b2e9751643e01e5e4e515
|
[
"MIT"
] | null | null | null |
app/instance/config.py
|
batyrotich/News-Api
|
50e4483afe669ff2678b2e9751643e01e5e4e515
|
[
"MIT"
] | null | null | null |
app/instance/config.py
|
batyrotich/News-Api
|
50e4483afe669ff2678b2e9751643e01e5e4e515
|
[
"MIT"
] | null | null | null |
# News API authentication key.
# NOTE(review): hardcoded secret committed to source (instance config) —
# presumably meant to be loaded from an environment variable; verify the
# deployment keeps this file out of version control.
NEWS_API_KEY = '<8333b14f2cca4769b762a0c1d11b17c5>'
| 51
| 51
| 0.862745
| 4
| 51
| 10.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.428571
| 0.039216
| 51
| 1
| 51
| 51
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0.653846
| 0.653846
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3e3e27a4327fa779bb6074beb92e74a117ff24a6
| 126
|
py
|
Python
|
aioftx/futures/api.py
|
metta-team/aioftx
|
f5bd028e8bf40c55c1d4632802b792be113e0978
|
[
"MIT"
] | null | null | null |
aioftx/futures/api.py
|
metta-team/aioftx
|
f5bd028e8bf40c55c1d4632802b792be113e0978
|
[
"MIT"
] | null | null | null |
aioftx/futures/api.py
|
metta-team/aioftx
|
f5bd028e8bf40c55c1d4632802b792be113e0978
|
[
"MIT"
] | null | null | null |
from expired.api import *
from futures.api import *
from indexes.api import *
from rates.api import *
from stats.api import *
| 21
| 25
| 0.761905
| 20
| 126
| 4.8
| 0.4
| 0.46875
| 0.541667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15873
| 126
| 5
| 26
| 25.2
| 0.90566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3e6c4a61f72e546776c7c0417dac5df3855c1de5
| 25,650
|
py
|
Python
|
keystone_tempest_plugin/tests/rbac/v3/test_project_tag.py
|
openstack/keystone-tempest-plugin
|
32e48a7ea5db99e31dae916f7964c8219025f257
|
[
"Apache-2.0"
] | 8
|
2017-06-20T10:42:19.000Z
|
2019-01-28T22:03:43.000Z
|
keystone_tempest_plugin/tests/rbac/v3/test_project_tag.py
|
openstack/keystone-tempest-plugin
|
32e48a7ea5db99e31dae916f7964c8219025f257
|
[
"Apache-2.0"
] | null | null | null |
keystone_tempest_plugin/tests/rbac/v3/test_project_tag.py
|
openstack/keystone-tempest-plugin
|
32e48a7ea5db99e31dae916f7964c8219025f257
|
[
"Apache-2.0"
] | 2
|
2018-06-15T06:45:32.000Z
|
2019-09-27T00:01:35.000Z
|
# Copyright 2020 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from tempest.api.identity import base
from tempest.lib.common.utils import data_utils
from tempest.lib import exceptions
from keystone_tempest_plugin.tests.rbac.v3 import base as rbac_base
class IdentityV3RbacProjectTagTests(rbac_base.IdentityV3RbacBaseTests,
metaclass=abc.ABCMeta):
@classmethod
def setup_clients(cls):
super(IdentityV3RbacProjectTagTests, cls).setup_clients()
cls.persona = getattr(cls, 'os_%s' % cls.credentials[0])
cls.client = cls.persona.project_tags_client
cls.admin_client = cls.os_system_admin
cls.admin_project_tags_client = cls.admin_client.project_tags_client
@abc.abstractmethod
def test_identity_create_project_tag(self):
"""Test identity:create_project_tag policy.
This test must check:
* whether the persona can create a tag for an arbitrary project
* whether the persona can create a tag for a project in their own
domain
* whether the persona can create a tag for a project in another
domain
* whether the persona can create a tag for their own project
"""
pass
@abc.abstractmethod
def test_identity_get_project_tag(self):
"""Test identity:get_project_tag policy.
This test must check:
* whether the persona can get a tag for an arbitrary project
* whether the persona can get a tag for a project in their own domain
* whether the persona can get a tag for a project in another domain
* whether the persona can get tag for their own project
"""
pass
@abc.abstractmethod
def test_identity_list_project_tags(self):
"""Test identity:list_project_tags policy.
This test must check:
* whether the persona can list tags for an arbitrary project
* whether the persona can list tags for a project in their own domain
* whether the persona can list tags for a project in another domain
* whether the persona can list tags for their own project
"""
pass
@abc.abstractmethod
def test_identity_update_project_tags(self):
"""Test identity:update_project_tags policy.
This test must check:
* whether the persona can update all tags for an project
* whether the persona can update all tags for a project in their own
domain
* whether the persona can update all tags for a project in another
domain
* whether the persona can update all tags for their own project
"""
pass
@abc.abstractmethod
def test_identity_delete_project_tag(self):
"""Test identity:delete_project_tag policy.
This test must check
* whether the persona can delete a single tag for an arbitrary
project
* whether the persona can delete a single tag for a project in their
own domain
* whether the persona can delete a single tag for a project in
another domain
* whether the persona can delete a single tag for their own project
"""
pass
@abc.abstractmethod
def test_identity_delete_project_tags(self):
"""Test identity:delete_project_tag policy.
This test must check
* whether the persona can delete all tags for an arbitrary project
* whether the persona can delete all tags for a project in their own
domain
* whether the persona can delete all tags for a project in another
domain
* whether the persona can delete all tags for their own project
"""
pass
class SystemAdminTests(IdentityV3RbacProjectTagTests, base.BaseIdentityTest):
credentials = ['system_admin']
def setUp(self):
super(SystemAdminTests, self).setUp()
self.project_id = self.admin_client.projects_client.create_project(
name=data_utils.rand_name())['project']['id']
self.addCleanup(
self.admin_client.projects_client.delete_project, self.project_id)
def test_identity_create_project_tag(self):
self.do_request(
'update_project_tag', expected_status=201,
project_id=self.project_id,
tag=data_utils.rand_uuid_hex()
)
def test_identity_get_project_tag(self):
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.project_id, tag=tag)
self.do_request('check_project_tag_existence',
expected_status=204,
project_id=self.project_id, tag=tag)
def test_identity_list_project_tags(self):
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.project_id, tag=tag)
resp = self.do_request('list_project_tags', project_id=self.project_id)
self.assertIn(tag, resp['tags'])
def test_identity_update_project_tags(self):
self.do_request('update_all_project_tags',
project_id=self.project_id,
tags=[data_utils.rand_uuid_hex()])
def test_identity_delete_project_tag(self):
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.project_id, tag=tag)
self.do_request('delete_project_tag', expected_status=204,
project_id=self.project_id,
tag=tag)
def test_identity_delete_project_tags(self):
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.project_id, tag=tag)
self.do_request('delete_all_project_tags', expected_status=204,
project_id=self.project_id)
class SystemMemberTests(SystemAdminTests, base.BaseIdentityTest):
credentials = ['system_member', 'system_admin']
def test_identity_create_project_tag(self):
self.do_request(
'update_project_tag', expected_status=exceptions.Forbidden,
project_id=self.project_id,
tag=data_utils.rand_uuid_hex()
)
def test_identity_update_project_tags(self):
self.do_request('update_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.project_id,
tags=[data_utils.rand_uuid_hex()])
def test_identity_delete_project_tag(self):
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.project_id, tag=tag)
self.do_request('delete_project_tag',
expected_status=exceptions.Forbidden,
project_id=self.project_id,
tag=tag)
def test_identity_delete_project_tags(self):
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.project_id, tag=tag)
self.do_request('delete_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.project_id)
class SystemReaderTests(SystemMemberTests):
    """system-reader persona: inherits every test expectation from
    SystemMemberTests unchanged; only the credentials differ."""
    credentials = ['system_reader', 'system_admin']
class DomainAdminTests(IdentityV3RbacProjectTagTests, base.BaseIdentityTest):
credentials = ['domain_admin', 'system_admin']
def setUp(self):
super(DomainAdminTests, self).setUp()
self.own_domain = self.persona.credentials.domain_id
self.other_domain = self.admin_client.domains_client.create_domain(
name=data_utils.rand_name())['domain']['id']
self.addCleanup(self.admin_client.domains_client.delete_domain,
self.other_domain)
self.addCleanup(self.admin_client.domains_client.update_domain,
domain_id=self.other_domain, enabled=False)
project_client = self.admin_client.projects_client
self.own_project_id = project_client.create_project(
name=data_utils.rand_name(),
domain_id=self.own_domain)['project']['id']
self.addCleanup(
project_client.delete_project,
self.own_project_id)
self.other_project_id = project_client.create_project(
name=data_utils.rand_name(),
domain_id=self.other_domain)['project']['id']
self.addCleanup(project_client.delete_project, self.other_project_id)
def test_identity_create_project_tag(self):
# user can add tags to project in own domain
tag = data_utils.rand_uuid_hex()
self.do_request(
'update_project_tag', expected_status=201,
project_id=self.own_project_id,
tag=tag
)
# user cannot add tags to project in other domain
tag = data_utils.rand_uuid_hex()
self.do_request(
'update_project_tag', expected_status=exceptions.Forbidden,
project_id=self.other_project_id,
tag=tag
)
def test_identity_get_project_tag(self):
# user can get tag for project in own domain
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.own_project_id, tag=tag)
self.do_request('check_project_tag_existence',
expected_status=204,
project_id=self.own_project_id, tag=tag)
# user cannot get tag for project in other domain
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.other_project_id, tag=tag)
self.do_request('check_project_tag_existence',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id, tag=tag)
def test_identity_list_project_tags(self):
# user can list tags for project in own domain
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.own_project_id, tag=tag)
resp = self.do_request('list_project_tags',
project_id=self.own_project_id)
self.assertIn(tag, resp['tags'])
# user cannot list tags for project in other domain
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.other_project_id, tag=tag)
self.do_request('list_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id)
def test_identity_update_project_tags(self):
# user can update tags for project in own domain
tag = data_utils.rand_uuid_hex()
self.do_request('update_all_project_tags',
project_id=self.own_project_id,
tags=[tag])
# user cannot update tags for project in other domain
tag = data_utils.rand_uuid_hex()
self.do_request('update_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id,
tags=[tag])
def test_identity_delete_project_tag(self):
# user can delete tag for project in own domain
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.own_project_id, tag=tag)
self.do_request('delete_project_tag', expected_status=204,
project_id=self.own_project_id,
tag=tag)
# user cannot delete tag for project in other domain
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.other_project_id, tag=tag)
self.do_request('delete_project_tag',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id,
tag=tag)
def test_identity_delete_project_tags(self):
# user can delete tags for project in own domain
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.own_project_id, tag=tag)
self.do_request('delete_all_project_tags', expected_status=204,
project_id=self.own_project_id)
# user cannot delete tags for project in other domain
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.other_project_id, tag=tag)
self.do_request('delete_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id)
class DomainMemberTests(DomainAdminTests, base.BaseIdentityTest):
credentials = ['domain_member', 'system_admin']
def test_identity_create_project_tag(self):
# user cannot add tags to project in own domain
tag = data_utils.rand_uuid_hex()
self.do_request(
'update_project_tag', expected_status=exceptions.Forbidden,
project_id=self.own_project_id,
tag=tag
)
# user cannot add tags to project in other domain
tag = data_utils.rand_uuid_hex()
self.do_request(
'update_project_tag', expected_status=exceptions.Forbidden,
project_id=self.other_project_id,
tag=tag
)
def test_identity_update_project_tags(self):
# user cannot update tags for project in own domain
tag = data_utils.rand_uuid_hex()
self.do_request('update_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.own_project_id,
tags=[tag])
# user cannot update tags for project in other domain
tag = data_utils.rand_uuid_hex()
self.do_request('update_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id,
tags=[tag])
def test_identity_delete_project_tag(self):
# user cannot delete tag for project in own domain
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.own_project_id, tag=tag)
self.do_request('delete_project_tag',
expected_status=exceptions.Forbidden,
project_id=self.own_project_id,
tag=tag)
# user cannot delete tag for project in other domain
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.other_project_id, tag=tag)
self.do_request('delete_project_tag',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id,
tag=tag)
def test_identity_delete_project_tags(self):
# user cannot delete tags for project in own domain
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.own_project_id, tag=tag)
self.do_request('delete_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.own_project_id)
# user cannot delete tags for project in other domain
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.other_project_id, tag=tag)
self.do_request('delete_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id)
class DomainReaderTests(DomainMemberTests):
    """domain-reader persona: inherits every test expectation from
    DomainMemberTests unchanged; only the credentials differ."""
    credentials = ['domain_reader', 'system_admin']
class ProjectAdminTests(IdentityV3RbacProjectTagTests, base.BaseIdentityTest):
credentials = ['project_admin', 'system_admin']
def setUp(self):
super(ProjectAdminTests, self).setUp()
self.own_project_id = self.persona.credentials.project_id
project_client = self.admin_client.projects_client
self.other_project_id = project_client.create_project(
name=data_utils.rand_name())['project']['id']
self.addCleanup(project_client.delete_project, self.other_project_id)
def test_identity_create_project_tag(self):
# user can add tags to own project
tag = data_utils.rand_uuid_hex()
self.do_request(
'update_project_tag', expected_status=201,
project_id=self.own_project_id,
tag=tag
)
self.addCleanup(self.admin_project_tags_client.delete_project_tag,
project_id=self.own_project_id,
tag=tag)
# user cannot add tags to arbitrary project
tag = data_utils.rand_uuid_hex()
self.do_request(
'update_project_tag', expected_status=exceptions.Forbidden,
project_id=self.other_project_id,
tag=tag
)
def test_identity_get_project_tag(self):
# user can get tag for own project
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.own_project_id, tag=tag)
self.addCleanup(self.admin_project_tags_client.delete_project_tag,
project_id=self.own_project_id,
tag=tag)
self.do_request('check_project_tag_existence',
expected_status=204,
project_id=self.own_project_id, tag=tag)
# user cannot get tag for arbitrary project
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.other_project_id, tag=tag)
self.do_request('check_project_tag_existence',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id, tag=tag)
def test_identity_list_project_tags(self):
# user can list tags for own project
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.own_project_id, tag=tag)
self.addCleanup(self.admin_project_tags_client.delete_project_tag,
project_id=self.own_project_id,
tag=tag)
resp = self.do_request('list_project_tags',
project_id=self.own_project_id)
self.assertIn(tag, resp['tags'])
# user cannot list tags for arbitrary project
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.other_project_id, tag=tag)
self.do_request('list_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id)
def test_identity_update_project_tags(self):
# user can update tags for own project
tag = data_utils.rand_uuid_hex()
self.do_request('update_all_project_tags',
project_id=self.own_project_id,
tags=[tag])
self.addCleanup(self.admin_project_tags_client.delete_project_tag,
project_id=self.own_project_id,
tag=tag)
# user cannot update tags for arbitrary project
tag = data_utils.rand_uuid_hex()
self.do_request('update_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id,
tags=[tag])
def test_identity_delete_project_tag(self):
# user can delete tag for own project
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.own_project_id, tag=tag)
self.do_request('delete_project_tag', expected_status=204,
project_id=self.own_project_id,
tag=tag)
# user cannot delete tag for arbitrary project
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.other_project_id, tag=tag)
self.do_request('delete_project_tag',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id,
tag=tag)
def test_identity_delete_project_tags(self):
# user can delete tags for own project
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.own_project_id, tag=tag)
self.do_request('delete_all_project_tags', expected_status=204,
project_id=self.own_project_id)
# user cannot delete tags for arbitrary project
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.other_project_id, tag=tag)
self.do_request('delete_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id)
class ProjectMemberTests(ProjectAdminTests):
credentials = ['project_member', 'system_admin']
def test_identity_create_project_tag(self):
# user cannot add tags to own project
tag = data_utils.rand_uuid_hex()
self.do_request(
'update_project_tag', expected_status=exceptions.Forbidden,
project_id=self.own_project_id,
tag=tag
)
# user cannot add tags to arbitrary project
tag = data_utils.rand_uuid_hex()
self.do_request(
'update_project_tag', expected_status=exceptions.Forbidden,
project_id=self.other_project_id,
tag=tag
)
def test_identity_update_project_tags(self):
# user cannot update tags for own project
tag = data_utils.rand_uuid_hex()
self.do_request('update_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.own_project_id,
tags=[tag])
# user cannot update tags for arbitrary project
tag = data_utils.rand_uuid_hex()
self.do_request('update_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id,
tags=[tag])
def test_identity_delete_project_tag(self):
# user cannot delete tag for own project
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.own_project_id, tag=tag)
self.addCleanup(self.admin_project_tags_client.delete_project_tag,
project_id=self.own_project_id,
tag=tag)
self.do_request('delete_project_tag',
expected_status=exceptions.Forbidden,
project_id=self.own_project_id,
tag=tag)
# user cannot delete tag for arbitrary project
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.other_project_id, tag=tag)
self.do_request('delete_project_tag',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id,
tag=tag)
def test_identity_delete_project_tags(self):
# user cannot delete tags for own project
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.own_project_id, tag=tag)
self.addCleanup(self.admin_project_tags_client.delete_project_tag,
project_id=self.own_project_id,
tag=tag)
self.do_request('delete_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.own_project_id)
# user cannot delete tags for arbitrary project
tag = data_utils.rand_uuid_hex()
self.admin_project_tags_client.update_project_tag(
project_id=self.other_project_id, tag=tag)
self.do_request('delete_all_project_tags',
expected_status=exceptions.Forbidden,
project_id=self.other_project_id)
class ProjectReaderTests(ProjectMemberTests):
    """project-reader persona: inherits every test expectation from
    ProjectMemberTests unchanged; only the credentials differ."""
    credentials = ['project_reader', 'system_admin']
| 42.75
| 79
| 0.645575
| 3,149
| 25,650
| 4.924103
| 0.050492
| 0.107958
| 0.080485
| 0.057075
| 0.88656
| 0.871727
| 0.863537
| 0.843996
| 0.833548
| 0.826067
| 0
| 0.002841
| 0.286433
| 25,650
| 599
| 80
| 42.821369
| 0.844342
| 0.17232
| 0
| 0.853717
| 0
| 0
| 0.063231
| 0.028632
| 0
| 0
| 0
| 0
| 0.007194
| 1
| 0.095923
| false
| 0.014388
| 0.01199
| 0
| 0.153477
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3e793595209615385a674a53294a5cbc1c36bf59
| 3,716
|
py
|
Python
|
src/models/loss.py
|
cerisara/weibull-knowledge-informed-ml
|
19017817f5324fb1fffd8322d2d3567a6271948c
|
[
"MIT"
] | 6
|
2022-01-07T09:35:49.000Z
|
2022-03-30T02:45:20.000Z
|
src/models/loss.py
|
tvhahn/weibull-knowledge-informed
|
bdcb838807ee6bbb5655b275ba0169b76e3f5acc
|
[
"MIT"
] | null | null | null |
src/models/loss.py
|
tvhahn/weibull-knowledge-informed
|
bdcb838807ee6bbb5655b275ba0169b76e3f5acc
|
[
"MIT"
] | 6
|
2021-12-29T01:19:06.000Z
|
2022-03-28T11:20:07.000Z
|
import torch
import torch.nn as nn
class WeibullLossRMSE(nn.Module):
    """
    Penalty on the difference between the Weibull CDF evaluated at the true
    age and at the predicted age.

    y_hat : predicted RUL
    y : true RUL
    y_days : true age (in days)
    lambda_mod : lambda modifier
    eta : characteristic life
    beta : shape parameter for weibull

    NOTE(review): the return value is sqrt(mean(diff) ** 2 + eps), i.e. a
    smoothed absolute value of the *mean* CDF difference — not the usual
    root of the mean of squared differences.  Confirm this ordering is
    intentional.
    NOTE(review): filtering non-finite entries from y_hat_days can change
    its length so it no longer aligns element-wise with y_days.
    """
    def __init__(self, eps=1e-8):
        super(WeibullLossRMSE, self).__init__()
        # small constant under the sqrt keeps the gradient finite at zero
        self.eps = eps
    def forward(self, y_hat, y, y_days, lambda_mod=2.0, eta=90.0, beta=2.0):
        # predicted age (days) = (true age + true RUL) - predicted RUL
        y_hat_days = (y_days + y) - y_hat
        # remove any "inf" values from when divided by zero
        y_hat_days = y_hat_days[torch.isfinite(y_hat_days)]
        def weibull_cdf(t, eta, beta):
            "Weibull CDF: 1 - exp(-(t / eta) ** beta)"
            return 1.0 - torch.exp(-1.0 * ((t / eta) ** beta))
        cdf = weibull_cdf(y_days, eta, beta)
        cdf_hat = weibull_cdf(y_hat_days, eta, beta)
        return lambda_mod * torch.sqrt(torch.mean(cdf_hat - cdf) ** 2 + self.eps)
class WeibullLossRMSLE(nn.Module):
    """
    Log-scaled variant of WeibullLossRMSE: compares log(1 + CDF) of the true
    and predicted ages.

    y_hat : predicted RUL
    y : true RUL
    y_days : true age (in days)
    lambda_mod : lambda modifier
    eta : characteristic life
    beta : shape parameter for weibull

    NOTE(review): like the RMSE variant, this takes the mean *before*
    squaring (sqrt(mean(diff) ** 2 + eps)) — confirm the ordering is
    intentional.
    NOTE(review): filtering non-finite entries from y_hat_days can change
    its length so it no longer aligns element-wise with y_days.
    """
    def __init__(self, eps=1e-8):
        super(WeibullLossRMSLE, self).__init__()
        # small constant under the sqrt keeps the gradient finite at zero
        self.eps = eps
    def forward(self, y_hat, y, y_days, lambda_mod=2.0, eta=90.0, beta=2.0):
        # predicted age (days) = (true age + true RUL) - predicted RUL
        y_hat_days = (y_days + y) - y_hat
        # remove any "inf" values from when divided by zero
        y_hat_days = y_hat_days[torch.isfinite(y_hat_days)]
        def weibull_cdf(t, eta, beta):
            "Weibull CDF: 1 - exp(-(t / eta) ** beta)"
            return 1.0 - torch.exp(-1.0 * ((t / eta) ** beta))
        cdf = weibull_cdf(y_days, eta, beta)
        cdf_hat = weibull_cdf(y_hat_days, eta, beta)
        return lambda_mod * torch.sqrt(torch.mean(torch.log(cdf_hat + 1) - torch.log(cdf+1)) ** 2 + self.eps)
class WeibullLossMSE(nn.Module):
    """
    Mean squared difference between the Weibull CDF evaluated at the true
    age and at the predicted age (unlike the RMSE/RMSLE variants above, this
    one squares element-wise before taking the mean).

    y_hat : predicted RUL
    y : true RUL
    y_days : true age (in days)
    lambda_mod : lambda modifier
    eta : characteristic life
    beta : shape parameter for weibull

    NOTE(review): filtering non-finite entries from y_hat_days can change
    its length so it no longer aligns element-wise with y_days.
    """
    def __init__(self):
        super(WeibullLossMSE, self).__init__()
    def forward(self, y_hat, y, y_days, lambda_mod=2.0, eta=90.0, beta=2.0):
        # predicted age (days) = (true age + true RUL) - predicted RUL
        y_hat_days = (y_days + y) - y_hat
        # remove any "inf" values from when divided by zero
        y_hat_days = y_hat_days[torch.isfinite(y_hat_days)]
        def weibull_cdf(t, eta, beta):
            "Weibull CDF: 1 - exp(-(t / eta) ** beta)"
            return 1.0 - torch.exp(-1.0 * ((t / eta) ** beta))
        cdf = weibull_cdf(y_days, eta, beta)
        cdf_hat = weibull_cdf(y_hat_days, eta, beta)
        return lambda_mod * torch.mean((cdf_hat - cdf) ** 2)
class RMSELoss(nn.Module):
    """Root-mean-squared-error loss.

    Wraps nn.MSELoss and takes the square root, with a small eps added
    under the root so the gradient stays finite when the MSE is exactly
    zero.  (Idea from
    https://discuss.pytorch.org/t/rmse-loss-function/16540/4)
    """
    def __init__(self, eps=1e-8):
        super().__init__()
        self.mse = nn.MSELoss()
        self.eps = eps

    def forward(self, y_hat, y):
        mean_sq_err = self.mse(y_hat, y)
        return torch.sqrt(mean_sq_err + self.eps)
class RMSLELoss(nn.Module):
    """Root-mean-squared-log-error loss.

    Applies log(1 + x) to predictions and targets before the MSE, then
    takes the square root with an eps guard under the root.  (Idea from
    https://discuss.pytorch.org/t/rmse-loss-function/16540/4)
    """
    def __init__(self, eps=1e-8):
        super().__init__()
        self.mse = nn.MSELoss()
        self.eps = eps

    def forward(self, y_hat, y):
        log_pred = torch.log(y_hat + 1)
        log_true = torch.log(y + 1)
        return torch.sqrt(self.mse(log_pred, log_true) + self.eps)
class MAPELoss(nn.Module):
    """Mean absolute percentage error, in percent.

    eps is added to the target before the division to avoid dividing by
    zero when a target value is exactly 0.
    """
    def __init__(self, eps=1e-8):
        super().__init__()
        self.eps = eps

    def forward(self, y_hat, y):
        rel_err = torch.abs(y - y_hat) / torch.abs(y + self.eps)
        return 100 * torch.mean(rel_err)
| 28.151515
| 109
| 0.589612
| 548
| 3,716
| 3.762774
| 0.133212
| 0.058196
| 0.058196
| 0.043647
| 0.835597
| 0.835597
| 0.823957
| 0.813288
| 0.813288
| 0.813288
| 0
| 0.024528
| 0.286868
| 3,716
| 132
| 110
| 28.151515
| 0.753585
| 0.241389
| 0
| 0.65
| 0
| 0
| 0.021637
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.033333
| 0.05
| 0.533333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
9032cb5d98108d8a3d8d4cf18c6f15ab7b0048ac
| 7,293
|
py
|
Python
|
data_collection/xmltodb.py
|
pinckert/pinckert.com
|
7d6ce3e37c6c39c1deebbceb40f4442d34c20e23
|
[
"Apache-2.0"
] | 1
|
2020-07-29T22:45:07.000Z
|
2020-07-29T22:45:07.000Z
|
data_collection/xmltodb.py
|
pinckert/pinckert.com
|
7d6ce3e37c6c39c1deebbceb40f4442d34c20e23
|
[
"Apache-2.0"
] | null | null | null |
data_collection/xmltodb.py
|
pinckert/pinckert.com
|
7d6ce3e37c6c39c1deebbceb40f4442d34c20e23
|
[
"Apache-2.0"
] | null | null | null |
<<<<<<< HEAD
#!/usr/bin/python
print "Content-type:text/html\n\n"
import os
import sys
import _mysql
import MySQLdb
import ../cgi_bin/db_util
import xml.etree.ElementTree as ET
#
# Migrate XML build files to DB
# -- Need to do this on server side since DB connection not allowed from local client
#
# To-do
# Common import for definitions used in client and server (also used in "createTables.py")
# Ignore 'in-progress' builds
# Ignore project xml files
# Re-do directory structure to include a project root directory
#
# for testing without DB accessible
db_present = 1
# configuration for data collection
ubuntu = {"root" : "wily", "table" : "ubuntu_builds"}
apache = {"root" : "A-D", "table" : "apache_builds"}
project_list = [apache]
#
# Return an SQL insert statement using a dictionary as columns/values
#
#-----
def buildSQLInsert(d, table):
    """Render an INSERT IGNORE statement for *table* from the column->value dict *d*.

    NOTE(review): values are interpolated straight into the SQL text, so the
    caller must pre-quote/escape them; this builder is not injection-safe.
    """
    column_list = ', '.join(d.keys())
    value_list = ', '.join(d.values())
    return "INSERT IGNORE INTO %s ( %s ) VALUES ( %s )" % (table, column_list, value_list)
#
# Return a dict of row names and values from a build.xml
#-----
def parseBuildData(xml_tree):
    """Return a dict of DB column -> quoted value parsed from a build XML tree."""
    # XML tag name -> DB column name; must stay in sync with createTables.py.
    tags = { "result" : "result",
             "number" : "number",
             "timestamp" : "start",
             "duration" : "duration",
             "builtOn" : "server"
             }
    build = xml_tree.getroot()
    # Pick out only the child elements whose tag we map to a column.
    values = {tags[child.tag]: "'%s'" % child.text
              for child in build if child.tag in tags}
    values["user"] = getUser(build)
    return values
#-----
# It's possible to have multiple 'culprits'. For now just get the first one.
#
def getUser(build):
    """Return the quoted full name of the first change author in *build*.

    *build* is the root element of a build XML tree.  Falls back to
    "'<none>'" when the changeSet/item/author/fullName chain is absent
    (e.g. a build triggered by an upstream job).
    """
    try:
        name = build.find("changeSet").find("item").find("author").find("fullName")
        user = name.text
    except AttributeError:
        # One of the .find() calls returned None: the author chain is missing.
        # The original bare `except:` also hid unrelated errors; narrowed here.
        user = "<none>"
    return "'%s'" % user
def appendBuilds(file_list, table_name):
    # Walk every directory in file_list and INSERT one row per build XML file
    # into table_name.  Emits HTML progress output (CGI context) and relies on
    # the module-level `db`, `connection`, `db_present` globals.  Python 2.
    for file in file_list:
        # process all directories
        if os.path.isdir(file):
            build_list = os.listdir(file)
            #
            # get data for each build
            #
            for build in build_list:
                print "Directory: %s, File %s<br>" % (file, build)
                if file == (build.replace(".xml", "")): # skip if it's the project file.
                    continue
                build_data = {}
                try:
                    print "attempting to parse " + file + "/" + build + "<br>"
                    tree = ET.parse(file + "/" + build)
                    build_data.update(parseBuildData(tree))
                    build_data["project_name"] = "'%s'" % file
                except Exception, err:
                    print "Parse failure : " + str(err.args[0])
                # NOTE(review): after a parse failure build_data stays empty,
                # yet the INSERT below is still built and executed — confirm
                # that is intentional.
                #
                # Add the data to the build table.
                #
                sql = buildSQLInsert(build_data, table_name)
                print "Executing -> " + sql + "<br>"
                if db_present:
                    try:
                        db.execute(sql)
                        connection.commit() # necessary to commit after each execute??
                    except MySQLdb.Error, e:
                        print "Error %d: %s<br>" % (e.args[0], e.args[1])
#------------------------------------------------------------
#
# *Main*
#
# Open the DB connection (unless db_present is disabled for offline testing),
# then import builds for each configured project directory.  Python 2 CGI.
if db_present:
    try:
        db_info = db_util.db_cred()
        connection = MySQLdb.connect (host = db_info["host"], user = db_info["user"], passwd = db_info["passwd"], db = db_info["db"])
        db = connection.cursor()
    except MySQLdb.Error, e:
        print "Error %d: %s<br>" % (e.args[0], e.args[1])
        # NOTE(review): `dbg` is never defined anywhere in this file — this
        # line would raise NameError if the except branch is reached.
        print dbg
#
# Cycle through directories based on projects
#
for proj in project_list:
    print "<h2> --- Beginning data import for proj %s </h2>" % proj
    os.chdir(proj["root"])
    file_list = os.listdir(".")
    appendBuilds(file_list, proj["table"])
    os.chdir("..")
# The suite of this guard (connection.close()) lies beyond the merge-conflict
# marker that truncates this copy of the file.
if db_present:
=======
#!/usr/bin/python
print "Content-type:text/html\n\n"
import os
import sys
import _mysql
import MySQLdb
import ../cgi_bin/db_util
import xml.etree.ElementTree as ET
#
# Migrate XML build files to DB
# -- Need to do this on server side since DB connection not allowed from local client
#
# To-do
# Common import for definitions used in client and server (also used in "createTables.py")
# Ignore 'in-progress' builds
# Ignore project xml files
# Re-do directory structure to include a project root directory
#
# for testing without DB accessible
db_present = 1
# configuration for data collection
ubuntu = {"root" : "wily", "table" : "ubuntu_builds"}
apache = {"root" : "A-D", "table" : "apache_builds"}
project_list = [apache]
#
# Return an SQL insert statement using a dictionary as columns/values
#
#-----
def buildSQLInsert(d, table):
    """Render an INSERT IGNORE statement for *table* from the column->value dict *d*.

    NOTE(review): values are interpolated straight into the SQL text, so the
    caller must pre-quote/escape them; this builder is not injection-safe.
    """
    column_list = ', '.join(d.keys())
    value_list = ', '.join(d.values())
    return "INSERT IGNORE INTO %s ( %s ) VALUES ( %s )" % (table, column_list, value_list)
#
# Return a dict of row names and values from a build.xml
#-----
def parseBuildData(xml_tree):
    """Return a dict of DB column -> quoted value parsed from a build XML tree."""
    # XML tag name -> DB column name; must stay in sync with createTables.py.
    tags = { "result" : "result",
             "number" : "number",
             "timestamp" : "start",
             "duration" : "duration",
             "builtOn" : "server"
             }
    build = xml_tree.getroot()
    # Pick out only the child elements whose tag we map to a column.
    values = {tags[child.tag]: "'%s'" % child.text
              for child in build if child.tag in tags}
    values["user"] = getUser(build)
    return values
#-----
# It's possible to have multiple 'culprits'. For now just get the first one.
#
def getUser(build):
    """Return the quoted full name of the first change author in *build*.

    *build* is the root element of a build XML tree.  Falls back to
    "'<none>'" when the changeSet/item/author/fullName chain is absent
    (e.g. a build triggered by an upstream job).
    """
    try:
        name = build.find("changeSet").find("item").find("author").find("fullName")
        user = name.text
    except AttributeError:
        # One of the .find() calls returned None: the author chain is missing.
        # The original bare `except:` also hid unrelated errors; narrowed here.
        user = "<none>"
    return "'%s'" % user
def appendBuilds(file_list, table_name):
    # Walk every directory in file_list and INSERT one row per build XML file
    # into table_name.  Emits HTML progress output (CGI context) and relies on
    # the module-level `db`, `connection`, `db_present` globals.  Python 2.
    for file in file_list:
        # process all directories
        if os.path.isdir(file):
            build_list = os.listdir(file)
            #
            # get data for each build
            #
            for build in build_list:
                print "Directory: %s, File %s<br>" % (file, build)
                if file == (build.replace(".xml", "")): # skip if it's the project file.
                    continue
                build_data = {}
                try:
                    print "attempting to parse " + file + "/" + build + "<br>"
                    tree = ET.parse(file + "/" + build)
                    build_data.update(parseBuildData(tree))
                    build_data["project_name"] = "'%s'" % file
                except Exception, err:
                    print "Parse failure : " + str(err.args[0])
                # NOTE(review): after a parse failure build_data stays empty,
                # yet the INSERT below is still built and executed — confirm
                # that is intentional.
                #
                # Add the data to the build table.
                #
                sql = buildSQLInsert(build_data, table_name)
                print "Executing -> " + sql + "<br>"
                if db_present:
                    try:
                        db.execute(sql)
                        connection.commit() # necessary to commit after each execute??
                    except MySQLdb.Error, e:
                        print "Error %d: %s<br>" % (e.args[0], e.args[1])
#------------------------------------------------------------
#
# *Main*
#
# Open the DB connection (unless db_present is disabled for offline testing),
# then import builds for each configured project directory.  Python 2 CGI.
if db_present:
    try:
        db_info = db_util.db_cred()
        connection = MySQLdb.connect (host = db_info["host"], user = db_info["user"], passwd = db_info["passwd"], db = db_info["db"])
        db = connection.cursor()
    except MySQLdb.Error, e:
        print "Error %d: %s<br>" % (e.args[0], e.args[1])
        # NOTE(review): `dbg` is never defined anywhere in this file — this
        # line would raise NameError if the except branch is reached.
        print dbg
#
# Cycle through directories based on projects
#
for proj in project_list:
    print "<h2> --- Beginning data import for proj %s </h2>" % proj
    os.chdir(proj["root"])
    file_list = os.listdir(".")
    appendBuilds(file_list, proj["table"])
    os.chdir("..")
if db_present:
>>>>>>> cb97fa344060fddee1b1b68722c1e6b281f454c7
connection.close()
| 26.423913
| 127
| 0.627588
| 1,002
| 7,293
| 4.501996
| 0.186627
| 0.019951
| 0.014631
| 0.012414
| 0.986921
| 0.986921
| 0.986921
| 0.986921
| 0.986921
| 0.986921
| 0
| 0.006919
| 0.207322
| 7,293
| 276
| 128
| 26.423913
| 0.773396
| 0.28685
| 0
| 0.962963
| 0
| 0
| 0.177413
| 0.01016
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.012346
| 0.08642
| null | null | 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
90379bc721c84c74f05cc57f6efb54bcccd3b77f
| 2,785
|
py
|
Python
|
src/datasetcrux/csvfun.py
|
datasetcrux/datasetcrux
|
6c78ddfca2f63b086460f6c45c139e263c186583
|
[
"MIT"
] | null | null | null |
src/datasetcrux/csvfun.py
|
datasetcrux/datasetcrux
|
6c78ddfca2f63b086460f6c45c139e263c186583
|
[
"MIT"
] | null | null | null |
src/datasetcrux/csvfun.py
|
datasetcrux/datasetcrux
|
6c78ddfca2f63b086460f6c45c139e263c186583
|
[
"MIT"
] | null | null | null |
def csv_printer(filepath):
    """Print every row of the CSV file at *filepath* to stdout.

    Exits the process with a diagnostic message if the CSV parser reports
    a malformed row.
    """
    import csv
    import sys
    with open(filepath, newline='') as handle:
        parser = csv.reader(handle)
        try:
            for record in parser:
                print(record)
        except csv.Error as exc:
            sys.exit('file {}, line {}: {}'.format(filepath, parser.line_num, exc))
def csv_reader(filepath):
    """Yield each row of the CSV file at *filepath* as a list of strings.

    Exits the process with a diagnostic message if the CSV parser reports
    a malformed row.
    """
    import csv
    import sys
    with open(filepath, newline='') as handle:
        parser = csv.reader(handle)
        try:
            for record in parser:
                yield record
        except csv.Error as exc:
            sys.exit('file {}, line {}: {}'.format(filepath, parser.line_num, exc))
def csv_reader_dict(filepath):
    """Yield each data row of the CSV file at *filepath* as a dict keyed by
    the header row.

    Exits the process with a diagnostic message if the CSV parser reports
    a malformed row.
    """
    import csv
    import sys
    with open(filepath, newline='') as handle:
        parser = csv.DictReader(handle)
        try:
            for record in parser:
                yield record
        except csv.Error as exc:
            sys.exit('file {}, line {}: {}'.format(filepath, parser.line_num, exc))
def csv_writer(filepath, data):
    """Write *data* (an iterable of rows) to *filepath* as CSV, overwriting
    any existing file.

    Exits the process with a diagnostic message on a CSV formatting error.
    """
    import csv
    import sys
    with open(filepath, 'w', newline='') as f:
        writer = csv.writer(f)
        try:
            writer.writerows(data)
        except csv.Error as e:
            # csv writer objects have no line_num attribute (only readers do),
            # so the original 'writer.line_num' raised AttributeError inside
            # this handler.  Report the file and error only.
            sys.exit('file {}: {}'.format(filepath, e))
def csv_writer_append(filepath, data):
    """Append *data* (an iterable of rows) to *filepath* as CSV rows.

    Exits the process with a diagnostic message on a CSV formatting error.
    """
    import csv
    import sys
    with open(filepath, 'a', newline='') as f:
        writer = csv.writer(f)
        try:
            writer.writerows(data)
        except csv.Error as e:
            # csv writer objects have no line_num attribute (only readers do),
            # so the original 'writer.line_num' raised AttributeError inside
            # this handler.  Report the file and error only.
            sys.exit('file {}: {}'.format(filepath, e))
def csv_writer_append_dict(filepath, data):
    """Append *data* (a sequence of dicts sharing one key set) to *filepath*
    as CSV rows.  Field order comes from the first dict; no header is written.

    No-op for empty *data*.  Exits the process with a diagnostic message on
    a CSV formatting error.
    """
    import csv
    import sys
    if not data:
        return  # nothing to write; data[0] below would raise IndexError
    with open(filepath, 'a', newline='') as f:
        writer = csv.DictWriter(f, data[0].keys())
        try:
            writer.writerows(data)
        except csv.Error as e:
            # csv writer objects have no line_num attribute (only readers do),
            # so the original 'writer.line_num' raised AttributeError inside
            # this handler.  Report the file and error only.
            sys.exit('file {}: {}'.format(filepath, e))
def csv_writer_append_dict_header(filepath, data):
    """Append a header row plus *data* (a sequence of dicts sharing one key
    set) to *filepath* as CSV.  Field order comes from the first dict.

    No-op for empty *data*.  Exits the process with a diagnostic message on
    a CSV formatting error.
    """
    import csv
    import sys
    if not data:
        return  # nothing to write; data[0] below would raise IndexError
    with open(filepath, 'a', newline='') as f:
        writer = csv.DictWriter(f, data[0].keys())
        try:
            writer.writeheader()
            writer.writerows(data)
        except csv.Error as e:
            # csv writer objects have no line_num attribute (only readers do),
            # so the original 'writer.line_num' raised AttributeError inside
            # this handler.  Report the file and error only.
            sys.exit('file {}: {}'.format(filepath, e))
def csv_writer_append_dict_header_if_not_exist(filepath, data):
    """Append *data* (a sequence of dicts sharing one key set) to *filepath*
    as CSV, writing the header row first only when the file is new or empty.

    No-op for empty *data*.  Exits the process with a diagnostic message on
    a CSV formatting error.
    """
    import csv
    import os
    import sys
    if not data:
        return  # nothing to write; data[0] below would raise IndexError
    # DictWriter has no has_header() method — the original call always raised
    # AttributeError.  Decide from the target file instead: a header is
    # needed iff the file does not yet exist or is empty.
    needs_header = not os.path.exists(filepath) or os.path.getsize(filepath) == 0
    with open(filepath, 'a', newline='') as f:
        writer = csv.DictWriter(f, data[0].keys())
        try:
            if needs_header:
                writer.writeheader()
            writer.writerows(data)
        except csv.Error as e:
            # csv writer objects have no line_num attribute; report file+error.
            sys.exit('file {}: {}'.format(filepath, e))
| 31.647727
| 81
| 0.545781
| 346
| 2,785
| 4.306358
| 0.124277
| 0.032215
| 0.080537
| 0.096644
| 0.957718
| 0.957718
| 0.953691
| 0.953691
| 0.953691
| 0.922819
| 0
| 0.001599
| 0.326391
| 2,785
| 87
| 82
| 32.011494
| 0.792644
| 0
| 0
| 0.846154
| 0
| 0
| 0.059289
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.102564
| false
| 0
| 0.205128
| 0
| 0.307692
| 0.025641
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
904a9779aa3ad8885147ad6c6cda8b4a0c70954f
| 30
|
py
|
Python
|
PyCharm/primer3.py
|
PervykhDarya/laba10
|
631e676d9683f3b1d4e976778bf9ddf0f8f5cfc3
|
[
"MIT"
] | null | null | null |
PyCharm/primer3.py
|
PervykhDarya/laba10
|
631e676d9683f3b1d4e976778bf9ddf0f8f5cfc3
|
[
"MIT"
] | null | null | null |
PyCharm/primer3.py
|
PervykhDarya/laba10
|
631e676d9683f3b1d4e976778bf9ddf0f8f5cfc3
|
[
"MIT"
] | null | null | null |
# Membership test against a set: sets provide O(1) "in" checks.
a = set(range(4))
print(2 in a)
| 15
| 16
| 0.466667
| 9
| 30
| 1.555556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 0.266667
| 30
| 2
| 17
| 15
| 0.409091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
396d16b8ebff8a8279a1b86181ab53ca7cb10d9e
| 2,304
|
py
|
Python
|
spm/bin_SMF/create_table1.py
|
AndresSixtos/pyeBOSS
|
4750908c8bc409633bef8f790133e3a1f3f0c9e4
|
[
"CC0-1.0"
] | 1
|
2017-05-23T13:03:27.000Z
|
2017-05-23T13:03:27.000Z
|
spm/bin_SMF/create_table1.py
|
AndresSixtos/pyeBOSS
|
4750908c8bc409633bef8f790133e3a1f3f0c9e4
|
[
"CC0-1.0"
] | null | null | null |
spm/bin_SMF/create_table1.py
|
AndresSixtos/pyeBOSS
|
4750908c8bc409633bef8f790133e3a1f3f0c9e4
|
[
"CC0-1.0"
] | 2
|
2017-09-26T11:17:30.000Z
|
2021-09-14T06:09:18.000Z
|
from lib_spm import *
# IMF / stellar-library combinations whose statistics go into the tables.
imfs = ["Chabrier_ELODIE_", "Chabrier_MILES_", "Chabrier_STELIB_", "Kroupa_ELODIE_", "Kroupa_MILES_", "Kroupa_STELIB_", "Salpeter_ELODIE_", "Salpeter_MILES_", "Salpeter_STELIB_" ]

# Table 1: DEEP2 (o2=False), BOSS and SDSS firefly DR14 statistics, one
# LaTeX row per IMF/library combination, '\hline' after each row.
out_path = os.path.join(os.environ['OBS_REPO'], 'spm', 'results', "table_1_r.tex")
with open(out_path, 'w') as table:
    for imf in imfs:
        prefix = imf.split('_')[0] + ' & ' + imf.split('_')[1]
        row = get_basic_stat_deep2(deep2, 'ZBEST', 'ZQUALITY', prefix, 2., imf, o2=False)
        table.write(row + " \n")
        table.write('\\hline \n')
    # (Portsmouth DR12 BOSS rows that used to be emitted here are disabled.)
    for imf in imfs:
        prefix = imf.split('_')[0] + ' & ' + imf.split('_')[1]
        row = get_basic_stat_firefly_DR14(boss, 'Z_NOQSO', 'Z_ERR_NOQSO', 'CLASS_NOQSO', 'ZWARNING_NOQSO', prefix, 0., imf)
        table.write(row + " \n")
        table.write('\\hline \n')
    # (Portsmouth DR12 SDSS rows that used to be emitted here are disabled.)
    for imf in imfs:
        prefix = imf.split('_')[0] + ' & ' + imf.split('_')[1]
        row = get_basic_stat_firefly_DR14(sdss, 'Z', 'Z_ERR', 'CLASS', 'ZWARNING', prefix, 0., imf)
        table.write(row + " \n")
        table.write('\\hline \n')

# Table 2: the same DEEP2 statistics but with the o2 flag enabled.
out_path = os.path.join(os.environ['OBS_REPO'], 'spm', 'results', "table_2_r.tex")
with open(out_path, 'w') as table:
    for imf in imfs:
        prefix = imf.split('_')[0] + ' & ' + imf.split('_')[1]
        row = get_basic_stat_deep2(deep2, 'ZBEST', 'ZQUALITY', prefix, 2., imf, o2=True)
        table.write(row + " \n")
| 40.421053
| 180
| 0.643663
| 379
| 2,304
| 3.614776
| 0.176781
| 0.065693
| 0.09635
| 0.131387
| 0.837226
| 0.829197
| 0.829197
| 0.829197
| 0.823358
| 0.727007
| 0
| 0.05234
| 0.137587
| 2,304
| 56
| 181
| 41.142857
| 0.637141
| 0.450521
| 0
| 0.703704
| 0
| 0
| 0.279872
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037037
| 0
| 0.037037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
39cb957ec67211561d8d7c1a1c6f962125d9fbeb
| 84
|
py
|
Python
|
biopymlff/util/getenv.py
|
saandre15/biopymlff
|
ec90370a8c03c51426bd24477034c9413bdcdb04
|
[
"MIT"
] | null | null | null |
biopymlff/util/getenv.py
|
saandre15/biopymlff
|
ec90370a8c03c51426bd24477034c9413bdcdb04
|
[
"MIT"
] | null | null | null |
biopymlff/util/getenv.py
|
saandre15/biopymlff
|
ec90370a8c03c51426bd24477034c9413bdcdb04
|
[
"MIT"
] | null | null | null |
import toml
import os
def getenv():
    """Load and return the TOML configuration from ``env.toml`` located in
    the current working directory."""
    config_path = os.path.join(os.getcwd(), "env.toml")
    return toml.load(config_path)
| 16.8
| 47
| 0.666667
| 13
| 84
| 4.307692
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 84
| 5
| 47
| 16.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.105882
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
845f30f87e8c7ca9750f33a617a8e8117f03a4df
| 7,582
|
py
|
Python
|
vfo_test.py
|
mbbatukan/vfo
|
27662cba01c713a5f09dcbf1a765e1120d6ad8a2
|
[
"MIT"
] | 3
|
2021-12-29T23:24:16.000Z
|
2022-03-21T12:10:29.000Z
|
vfo_test.py
|
mbbatukan/vfo
|
27662cba01c713a5f09dcbf1a765e1120d6ad8a2
|
[
"MIT"
] | 3
|
2021-11-28T20:11:27.000Z
|
2022-03-25T18:52:09.000Z
|
vfo_test.py
|
mbbatukan/vfo
|
27662cba01c713a5f09dcbf1a765e1120d6ad8a2
|
[
"MIT"
] | 3
|
2022-01-12T05:55:12.000Z
|
2022-03-24T13:04:48.000Z
|
####################################################
##
## Test Examples
##
####################################################
import numpy as np
import openseespy.opensees as ops
import vfo.vfo as vfo
import math
def portalframe2d():
    """Build a 2D elastic portal frame (two fixed-base columns, one beam)
    in OpenSeesPy and return the vfo model plot."""
    print("portal frame 2D test")
    ops.wipe()
    # set modelbuilder: 2 dimensions, 3 DOF per node
    ops.model('basic', '-ndm', 2, '-ndf', 3)
    ############################################
    ### Units and Constants ###################
    ############################################
    inch = 1;
    kip = 1;
    sec = 1;
    # Dependent units
    sq_in = inch*inch;
    ksi = kip/sq_in;
    ft = 12*inch;
    # Constants
    g = 386.2*inch/(sec*sec);
    pi = math.acos(-1);
    #######################################
    ##### Dimensions
    #######################################
    # Dimensions Input
    H_story=10.0*ft;
    W_bayX=16.0*ft;
    W_bayY_ab=5.0*ft+10.0*inch;
    W_bayY_bc=8.0*ft+4.0*inch;
    W_bayY_cd=5.0*ft+10.0*inch;
    # Calculated dimensions
    W_structure=W_bayY_ab+W_bayY_bc+W_bayY_cd;
    # ###########################
    # ##### Nodes
    # ###########################
    # Create All main nodes (base: 1,2; story level: 11,12)
    ops.node(1, 0.0, 0.0)
    ops.node(2, W_bayX, 0.0)
    ops.node(11, 0.0, H_story)
    ops.node(12, W_bayX, H_story)
    # ###############
    # Constraints: fix both base nodes in all 3 DOF
    # ###############
    ops.fix(1, 1, 1, 1)
    ops.fix(2, 1, 1, 1)
    # #######################
    # ### Elements
    # #######################
    ColTransfTag=1
    BeamTranfTag=2
    ops.geomTransf('Linear', ColTransfTag)
    ops.geomTransf('Linear', BeamTranfTag)
    # Assign Elements ##############
    # ## Add non-linear column elements
    ops.element('elasticBeamColumn', 1, 1, 11, 20., 1000., 1000., ColTransfTag, '-mass', 0.0)
    ops.element('elasticBeamColumn', 2, 2, 12, 20., 1000., 1000., ColTransfTag, '-mass', 0.0)
    # ### Add linear main beam elements, along x-axis
    ops.element('elasticBeamColumn', 101, 11, 12, 20., 1000., 1000., BeamTranfTag, '-mass', 0.0)
    # Visualize the model
    return vfo.plot_model()
def portalframe3d():
    """Build a 3D elastic portal frame (two fixed-base columns, one beam)
    in OpenSeesPy and return the vfo model plot."""
    print("portal frame 3D test")
    ops.wipe()
    # set modelbuilder: 3 dimensions, 6 DOF per node
    ops.model('basic', '-ndm', 3, '-ndf', 6)
    ############################################
    ### Units and Constants ###################
    ############################################
    inch = 1;
    kip = 1;
    sec = 1;
    # Dependent units
    sq_in = inch*inch;
    ksi = kip/sq_in;
    ft = 12*inch;
    # Constants
    g = 386.2*inch/(sec*sec);
    pi = math.acos(-1);
    #######################################
    ##### Dimensions
    #######################################
    # Dimensions Input
    H_story=10.0*ft;
    W_bayX=16.0*ft;
    W_bayY_ab=5.0*ft+10.0*inch;
    W_bayY_bc=8.0*ft+4.0*inch;
    W_bayY_cd=5.0*ft+10.0*inch;
    # Calculated dimensions
    W_structure=W_bayY_ab+W_bayY_bc+W_bayY_cd;
    # ###########################
    # ##### Nodes
    # ###########################
    # Create All main nodes (base: 1,2; story level: 11,12)
    ops.node(1, 0.0, 0.0, 0.0)
    ops.node(2, W_bayX, 0.0, 0.0)
    ops.node(11, 0.0, H_story, 0.0)
    ops.node(12, W_bayX, H_story, 0.0)
    # ###############
    # Constraints: fix both base nodes in all 6 DOF
    # ###############
    ops.fix(1, 1, 1, 1, 1, 1, 1)
    ops.fix(2, 1, 1, 1, 1, 1, 1)
    # #######################
    # ### Elements
    # #######################
    ColTransfTag=1
    BeamTranfTag=2
    ops.geomTransf('Linear', ColTransfTag, 1, 0, 0)
    ops.geomTransf('Linear', BeamTranfTag, 0, 0, 1)
    # Assign Elements ##############
    # ## Add non-linear column elements
    ops.element('elasticBeamColumn', 1, 1, 11, 20., 1000., 1000., 1000., 1000., 1000., ColTransfTag, '-mass', 0.0)
    ops.element('elasticBeamColumn', 2, 2, 12, 20., 1000., 1000., 1000., 1000., 1000.,ColTransfTag, '-mass', 0.0)
    # ### Add linear main beam elements, along x-axis
    ops.element('elasticBeamColumn', 101, 11, 12, 20., 1000., 1000., 1000., 1000., 1000.,BeamTranfTag, '-mass', 0.0)
    # Visualize the model
    return vfo.plot_model()
def tri3d():
    """Build a single Tri31 plane-stress triangle in 3D space and return
    the vfo model plot."""
    print("portal frame 3D test")
    ops.wipe()
    # set modelbuilder: 3 dimensions, 3 DOF per node
    ops.model('basic', '-ndm', 3, '-ndf', 3)
    # Create All main nodes
    ops.node(1, 0.0, 0.0, 0.0)
    ops.node(2, 10.0, 0.0, 0.0)
    ops.node(3, 0.0, 10.0, 0.0)
    # ###############
    # Constraints: fix nodes 1 and 2
    # ###############
    ops.fix(1, 1, 1, 1)
    ops.fix(2, 1, 1, 1)
    ops.nDMaterial("ElasticIsotropic", 1, 1000.0, 0.25, 6.75)
    # Assign Elements ##############
    # ## Add non-linear column elements
    ops.element('Tri31', 1, 1,2,3, 0.1, 'PlaneStress', 1)
    # Visualize the model
    return vfo.plot_model()
def quad2d():
    """Build a single 4-node plane-stress quad in 2D and return the vfo
    model plot."""
    print("quad3d 3D test")
    ops.wipe()
    # set modelbuilder: 2 dimensions, 2 DOF per node
    ops.model('basic', '-ndm', 2, '-ndf', 2)
    # ###########################
    # ##### Nodes
    # ###########################
    # Create All main nodes
    ops.node(1, 0.0, 0.0)
    ops.node(2, 50.0, 0.0)
    ops.node(3, 0.0, 30.0)
    ops.node(4, 50.0, 30.0)
    # ops.fix(1, 1, 1, 1, 1, 1, 1)
    # ops.fix(2, 1, 1, 1, 1, 1, 1)
    ops.fix(1, 1, 1)
    ops.fix(2, 1, 1)
    ops.nDMaterial("ElasticIsotropic", 1, 1000.0, 0.25, 6.75)
    # Assign Elements ##############
    # ## Add non-linear column elements
    ops.element('quad', 1, 1,2,4,3, 0.1, 'PlaneStress', 1)
    # vfo.createODB("Test_mvlem3d", "none", 0)
    # Visualize the model
    return vfo.plot_model()
def mvlem_3d():
    """Build a stacked pair of 10-fiber MVLEM_3D wall elements and return
    the vfo model plot."""
    print("MVLEM 3D test")
    ops.wipe()
    # set modelbuilder: 3 dimensions, 6 DOF per node
    ops.model('basic', '-ndm', 3, '-ndf', 6)
    # ###########################
    # ##### Nodes
    # ###########################
    # Create All main nodes (two elements stacked vertically)
    ops.node(1, 0.0, 0.0, 0.0)
    ops.node(2, 50.0, 0.0, 0.0)
    ops.node(3, 0.0, 0.0, 30.0)
    ops.node(4, 50.0, 0.0, 30.0)
    ops.node(5, 0.0, 0.0, 60.0)
    ops.node(6, 50.0, 0.0, 60.0)
    # Fix both base nodes in all 6 DOF
    ops.fix(1, 1, 1, 1, 1, 1, 1)
    ops.fix(2, 1, 1, 1, 1, 1, 1)
    # Shear material
    ops.uniaxialMaterial('Elastic', 1, 314705)
    # Concrete Materials
    ops.uniaxialMaterial('Concrete02', 201, -7.934, -0.0023, 0, -0.01, 0.079, 0.356292015066294, 253.858060734734)
    # Steel Materials
    ops.uniaxialMaterial('SteelMPF', 301, 68.313, 68.313, 27847.246, 0.0055, 0.0055, 20, 0.925, 0.15)
    # Assign Elements ##############
    ops.element('MVLEM_3D', 1,1,2,4,3, 10, '-thick', *[3.937, 3.937, 3.937, 3.937, 3.937, 3.937, 3.937, 3.937, 3.937, 3.937],
                '-width', *[5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0],
                '-rho', *[0.0387, 0.0387, 0.00342, 0.00342, 0.00342, 0.00342, 0.00342, 0.00342, 0.0226, 0.0226],
                '-matConcrete', *[201, 201, 201, 201, 201, 201, 201, 201, 201, 201],
                '-matSteel', *[301, 301, 301, 301, 301, 301, 301, 301, 301, 301], '-matShear', 1)
    ops.element('MVLEM_3D', 2,3,4,6,5, 10, '-thick', *[3.937, 3.937, 3.937, 3.937, 3.937, 3.937, 3.937, 3.937, 3.937, 3.937],
                '-width', *[5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0],
                '-rho', *[0.0387, 0.0387, 0.00342, 0.00342, 0.00342, 0.00342, 0.00342, 0.00342, 0.0226, 0.0226],
                '-matConcrete', *[201, 201, 201, 201, 201, 201, 201, 201, 201, 201],
                '-matSteel', *[301, 301, 301, 301, 301, 301, 301, 301, 301, 301], '-matShear', 1)
    # vfo.createODB("Test_mvlem3d", "none", 0)
    # Visualize the model
    return vfo.plot_model()
def tetra():
    """Build a single four-node tetrahedral solid element and return the
    vfo model plot."""
    print("Tetrahedral test")
    ops.wipe()
    # set modelbuilder: 3 dimensions, 3 DOF per node
    ops.model('basic', '-ndm', 3, '-ndf', 3)
    # Create All main nodes
    ops.node(1, 0.0, 0.0, 0.0)
    ops.node(2, 10.0, 0.0, 0.0)
    ops.node(3, 5.0, 10.0, 0.0)
    ops.node(4, 5.0, 5.0, 10.0)
    # ###############
    # Constraints: fix nodes 1 and 2
    # ###############
    ops.fix(1, 1, 1, 1)
    ops.fix(2, 1, 1, 1)
    ops.nDMaterial("ElasticIsotropic", 1, 1000.0, 0.25, 6.75)
    # Assign Elements ##############
    # ## Add non-linear column elements
    ops.element('FourNodeTetrahedron', 1, 1,2,3,4, 1)
    # Visualize the model
    return vfo.plot_model()
# Run every example and keep one distinct handle per returned figure.
fig1 = portalframe2d()
fig2 = portalframe3d()
fig3 = quad2d()
fig4 = mvlem_3d()  # was 'fig3 = mvlem_3d()', which clobbered the quad2d figure
fig5 = tri3d()     # was misspelled 'fog4'
fig6 = tetra()     # was misspelled 'fog5'
| 23.473684
| 123
| 0.520047
| 1,176
| 7,582
| 3.306973
| 0.131803
| 0.039085
| 0.030856
| 0.024685
| 0.832605
| 0.818977
| 0.815377
| 0.800977
| 0.776292
| 0.755207
| 0
| 0.160931
| 0.17225
| 7,582
| 322
| 124
| 23.546584
| 0.458732
| 0.161171
| 0
| 0.572464
| 0
| 0
| 0.108583
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.028986
| 0
| 0.115942
| 0.043478
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0801e0fdd3a99e398ab265ecb244ce2751971875
| 763
|
py
|
Python
|
Lib/site-packages/dataframejy/indexing.py
|
Yaqiang/jythonlab
|
d031d85e5bd5f19943c6a410c56ceb734c533534
|
[
"CNRI-Jython",
"Apache-2.0"
] | 2
|
2019-03-21T07:14:19.000Z
|
2020-06-23T12:53:15.000Z
|
Lib/site-packages/dataframejy/indexing.py
|
Yaqiang/jythonlab
|
d031d85e5bd5f19943c6a410c56ceb734c533534
|
[
"CNRI-Jython",
"Apache-2.0"
] | null | null | null |
Lib/site-packages/dataframejy/indexing.py
|
Yaqiang/jythonlab
|
d031d85e5bd5f19943c6a410c56ceb734c533534
|
[
"CNRI-Jython",
"Apache-2.0"
] | null | null | null |
class LocIndexer(object):
    """Subscript helper that forwards ``indexer[key]`` to the wrapped data
    object's ``_getitem_loc``."""

    def __init__(self, dataobj):
        # Keep a reference to the object that implements the real lookup.
        self._dataobj = dataobj

    def __getitem__(self, key):
        return self._dataobj._getitem_loc(key)
class ILocIndexer(object):
    """Subscript helper that forwards ``indexer[key]`` to the wrapped data
    object's ``_getitem_iloc``."""

    def __init__(self, dataobj):
        # Keep a reference to the object that implements the real lookup.
        self._dataobj = dataobj

    def __getitem__(self, key):
        return self._dataobj._getitem_iloc(key)
class AtIndexer(object):
    """Subscript helper that forwards ``indexer[key]`` to the wrapped data
    object's ``_getitem_at``."""

    def __init__(self, dataobj):
        # Keep a reference to the object that implements the real lookup.
        self._dataobj = dataobj

    def __getitem__(self, key):
        return self._dataobj._getitem_at(key)
class IAtIndexer(object):
    """Subscript helper that forwards ``indexer[key]`` to the wrapped data
    object's ``_getitem_iat``."""

    def __init__(self, dataobj):
        # Keep a reference to the object that implements the real lookup.
        self._dataobj = dataobj

    def __getitem__(self, key):
        return self._dataobj._getitem_iat(key)
| 23.84375
| 47
| 0.608126
| 80
| 763
| 5.2
| 0.2125
| 0.317308
| 0.125
| 0.163462
| 0.798077
| 0.798077
| 0.798077
| 0.798077
| 0.798077
| 0.798077
| 0
| 0
| 0.302752
| 763
| 32
| 48
| 23.84375
| 0.781955
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.8
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
f239a267c0771b65ac4be2a5942a160d6a1847da
| 8,480
|
py
|
Python
|
tests/test1.py
|
jvansteirteghem/twunnel
|
6de57636c7a9df03fe8452c5bc57071aad3719a7
|
[
"MIT"
] | 16
|
2015-02-09T11:45:05.000Z
|
2021-07-22T12:21:25.000Z
|
tests/test1.py
|
jvansteirteghem/twunnel
|
6de57636c7a9df03fe8452c5bc57071aad3719a7
|
[
"MIT"
] | null | null | null |
tests/test1.py
|
jvansteirteghem/twunnel
|
6de57636c7a9df03fe8452c5bc57071aad3719a7
|
[
"MIT"
] | 7
|
2015-02-23T18:38:17.000Z
|
2021-04-15T11:10:19.000Z
|
# -*- coding: utf-8 -*-
import sys
import os
sys.path.insert(0, os.path.abspath(".."))
from twisted.internet import protocol
from twisted.trial import unittest
from twisted.test import proto_helpers
import base64
import struct
import socket
from twunnel import proxy_server__https, proxy_server__socks5
class TestTunnelProtocol(protocol.Protocol):
    """Inert stand-in for a tunnel protocol: every callback is a no-op."""

    def __init__(self):
        """No state to initialise."""

    def connectionMade(self):
        """Ignore connection establishment."""

    def connectionLost(self, reason):
        """Ignore connection loss."""

    def dataReceived(self, data):
        """Discard incoming data."""

    def tunnelOutputProtocol_connectionMade(self, data):
        """Ignore completion of the tunnel handshake."""
class HTTPSTunnelTestCase(unittest.TestCase):
    # Drives HTTPSTunnelOutputProtocol against a StringTransport and checks
    # the CONNECT handshake it emits when no credentials are configured.
    def setUp(self):
        self.configuration = \
        {
            "PROXY_SERVER":
            {
                "TYPE": "HTTPS",
                "ADDRESS": "127.0.0.1",
                "PORT": 8080,
                "ACCOUNT":
                {
                    "NAME": "",
                    "PASSWORD": ""
                }
            }
        }
        self.address = "127.0.0.1"
        self.port = 80
        # Wire the protocol under test to an in-memory transport.
        self.tunnelOutputProtocolFactory = proxy_server__https.HTTPSTunnelOutputProtocolFactory(self.configuration, self.address, self.port)
        self.tunnelOutputProtocolFactory.tunnelProtocol = TestTunnelProtocol()
        self.tunnelOutputProtocol = self.tunnelOutputProtocolFactory.buildProtocol((self.address, self.port))
        self.transport = proto_helpers.StringTransport()
        self.tunnelOutputProtocol.makeConnection(self.transport)

    def tearDown(self):
        self.transport.loseConnection()

    def test(self):
        # On connection the protocol must send a plain CONNECT request
        # with no Proxy-Authorization header.
        value = self.transport.value()
        self.transport.clear()
        self.assertEqual(value, "CONNECT %s:%d HTTP/1.1\r\n\r\n" % (self.address, self.port))
        # Feed a 200 back so the protocol can complete its handshake.
        self.tunnelOutputProtocol.dataReceived("HTTP/1.1 200 OK\r\n\r\n")
class HTTPSTunnelBasicAuthenticationTestCase(unittest.TestCase):
    # Same as HTTPSTunnelTestCase but with account credentials set, so the
    # CONNECT request must carry a Basic Proxy-Authorization header.
    def setUp(self):
        self.configuration = \
        {
            "PROXY_SERVER":
            {
                "TYPE": "HTTPS",
                "ADDRESS": "127.0.0.1",
                "PORT": 8080,
                "ACCOUNT":
                {
                    "NAME": "1",
                    "PASSWORD": "2"
                }
            }
        }
        self.address = "127.0.0.1"
        self.port = 80
        # Wire the protocol under test to an in-memory transport.
        self.tunnelOutputProtocolFactory = proxy_server__https.HTTPSTunnelOutputProtocolFactory(self.configuration, self.address, self.port)
        self.tunnelOutputProtocolFactory.tunnelProtocol = TestTunnelProtocol()
        self.tunnelOutputProtocol = self.tunnelOutputProtocolFactory.buildProtocol((self.address, self.port))
        self.transport = proto_helpers.StringTransport()
        self.tunnelOutputProtocol.makeConnection(self.transport)

    def tearDown(self):
        self.transport.loseConnection()

    def test(self):
        # The CONNECT request must include base64("NAME:PASSWORD") as a
        # Basic Proxy-Authorization credential.
        value = self.transport.value()
        self.transport.clear()
        self.assertEqual(value, "CONNECT %s:%d HTTP/1.1\r\nProxy-Authorization: Basic %s\r\n\r\n" % (self.address, self.port, base64.standard_b64encode("%s:%s" % (self.configuration["PROXY_SERVER"]["ACCOUNT"]["NAME"], self.configuration["PROXY_SERVER"]["ACCOUNT"]["PASSWORD"]))))
        # Feed a 200 back so the protocol can complete its handshake.
        self.tunnelOutputProtocol.dataReceived("HTTP/1.1 200 OK\r\n\r\n")
class SOCKS5TunnelIPv4TestCase(unittest.TestCase):
    # Drives SOCKS5TunnelOutputProtocol through a full handshake for an
    # IPv4 (dotted-quad) destination address.
    def setUp(self):
        self.configuration = \
        {
            "PROXY_SERVER":
            {
                "TYPE": "SOCKS5",
                "ADDRESS": "127.0.0.1",
                "PORT": 1080,
                "ACCOUNT":
                {
                    "NAME": "",
                    "PASSWORD": ""
                }
            }
        }
        self.address = "127.0.0.1"
        self.port = 80
        # Wire the protocol under test to an in-memory transport.
        self.tunnelOutputProtocolFactory = proxy_server__socks5.SOCKS5TunnelOutputProtocolFactory(self.configuration, self.address, self.port)
        self.tunnelOutputProtocolFactory.tunnelProtocol = TestTunnelProtocol()
        self.tunnelOutputProtocol = self.tunnelOutputProtocolFactory.buildProtocol((self.address, self.port))
        self.transport = proto_helpers.StringTransport()
        self.tunnelOutputProtocol.makeConnection(self.transport)

    def tearDown(self):
        self.transport.loseConnection()

    def test(self):
        # First message: version 0x05 plus the offered auth methods
        # (0x00 no-auth, 0x02 username/password).
        value = self.transport.value()
        self.transport.clear()
        version, numberOfMethods = struct.unpack("!BB", value[:2])
        value = value[2:]
        methods = struct.unpack("!%dB" % numberOfMethods, value[:numberOfMethods])
        value = value[numberOfMethods:]
        self.assertEqual(version, 0x05)
        self.assertEqual(numberOfMethods, 0x02)
        self.assertEqual(methods[0], 0x00)
        self.assertEqual(methods[1], 0x02)
        # Server selects no-auth (0x00).
        value = struct.pack("!BB", 0x05, 0x00)
        self.tunnelOutputProtocol.dataReceived(value)
        # Second message: the connect request; 0x01 here is the CONNECT
        # command and 0x01 the IPv4 address type.
        value = self.transport.value()
        self.transport.clear()
        version, method, reserved, addressType = struct.unpack("!BBBB", value[:4])
        value = value[4:]
        self.assertEqual(version, 0x05)
        self.assertEqual(method, 0x01)
        self.assertEqual(reserved, 0x00)
        self.assertEqual(addressType, 0x01)
        # Destination is packed as a 4-byte address plus a 2-byte port.
        address, port = struct.unpack("!IH", value[:6])
        address = struct.pack("!I", address)
        address = socket.inet_ntop(socket.AF_INET, address)
        value = value[6:]
        self.assertEqual(address, self.address)
        self.assertEqual(port, self.port)
        # Server replies success (0x00) with a zeroed bind address/port.
        value = struct.pack("!BBBBIH", 0x05, 0x00, 0x00, 0x01, 0, 0)
        self.tunnelOutputProtocol.dataReceived(value)
class SOCKS5TunnelDNTestCase(unittest.TestCase):
    # Drives SOCKS5TunnelOutputProtocol through a full handshake for a
    # domain-name destination address (address type 0x03).
    def setUp(self):
        self.configuration = \
        {
            "PROXY_SERVER":
            {
                "TYPE": "SOCKS5",
                "ADDRESS": "127.0.0.1",
                "PORT": 1080,
                "ACCOUNT":
                {
                    "NAME": "",
                    "PASSWORD": ""
                }
            }
        }
        self.address = "localhost"
        self.port = 80
        # Wire the protocol under test to an in-memory transport.
        self.tunnelOutputProtocolFactory = proxy_server__socks5.SOCKS5TunnelOutputProtocolFactory(self.configuration, self.address, self.port)
        self.tunnelOutputProtocolFactory.tunnelProtocol = TestTunnelProtocol()
        self.tunnelOutputProtocol = self.tunnelOutputProtocolFactory.buildProtocol((self.address, self.port))
        self.transport = proto_helpers.StringTransport()
        self.tunnelOutputProtocol.makeConnection(self.transport)

    def tearDown(self):
        self.transport.loseConnection()

    def test(self):
        # First message: version 0x05 plus the offered auth methods
        # (0x00 no-auth, 0x02 username/password).
        value = self.transport.value()
        self.transport.clear()
        version, numberOfMethods = struct.unpack("!BB", value[:2])
        value = value[2:]
        methods = struct.unpack("!%dB" % numberOfMethods, value[:numberOfMethods])
        value = value[numberOfMethods:]
        self.assertEqual(version, 0x05)
        self.assertEqual(numberOfMethods, 0x02)
        self.assertEqual(methods[0], 0x00)
        self.assertEqual(methods[1], 0x02)
        # Server selects no-auth (0x00).
        value = struct.pack("!BB", 0x05, 0x00)
        self.tunnelOutputProtocol.dataReceived(value)
        # Second message: the connect request; 0x01 here is the CONNECT
        # command and 0x03 the domain-name address type.
        value = self.transport.value()
        self.transport.clear()
        version, method, reserved, addressType = struct.unpack("!BBBB", value[:4])
        value = value[4:]
        self.assertEqual(version, 0x05)
        self.assertEqual(method, 0x01)
        self.assertEqual(reserved, 0x00)
        self.assertEqual(addressType, 0x03)
        # Domain names are length-prefixed, followed by the 2-byte port.
        addressLength, = struct.unpack("!B", value[:1])
        value = value[1:]
        address, port = struct.unpack("!%dsH" % addressLength, value[:addressLength + 2])
        value = value[addressLength + 2:]
        self.assertEqual(address, self.address)
        self.assertEqual(port, self.port)
        # Server replies success (0x00) with a zeroed bind address/port.
        value = struct.pack("!BBBBIH", 0x05, 0x00, 0x00, 0x01, 0, 0)
        self.tunnelOutputProtocol.dataReceived(value)
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
| 33.385827
| 279
| 0.574292
| 748
| 8,480
| 6.450535
| 0.152406
| 0.064663
| 0.037306
| 0.039378
| 0.818031
| 0.803523
| 0.803523
| 0.803523
| 0.794197
| 0.794197
| 0
| 0.035304
| 0.31191
| 8,480
| 254
| 280
| 33.385827
| 0.791602
| 0.002476
| 0
| 0.680628
| 0
| 0.005236
| 0.063845
| 0.003783
| 0
| 0
| 0.013242
| 0
| 0.115183
| 1
| 0.089005
| false
| 0.052356
| 0.04712
| 0
| 0.162304
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
f29082b0b73e349e56da75f902298c808668c739
| 27,960
|
py
|
Python
|
api/test/importer/testAdvancedImportTeam.py
|
fras2560/mlsb-platform
|
17aee2c1e220860b200662fd67d2fc697a3d8abb
|
[
"Apache-2.0"
] | 1
|
2016-05-06T15:37:35.000Z
|
2016-05-06T15:37:35.000Z
|
api/test/importer/testAdvancedImportTeam.py
|
Major-League-Summer-Baseball/mlsb-platform
|
ecb2a6a15dcaa12c4e18a6d9c5d1b4caf83e05a4
|
[
"Apache-2.0"
] | 42
|
2021-03-12T23:18:30.000Z
|
2022-03-13T20:57:36.000Z
|
api/test/importer/testAdvancedImportTeam.py
|
Major-League-Summer-Baseball/mlsb-platform
|
ecb2a6a15dcaa12c4e18a6d9c5d1b4caf83e05a4
|
[
"Apache-2.0"
] | 1
|
2019-04-21T23:24:54.000Z
|
2019-04-21T23:24:54.000Z
|
'''
@author: Dallas Fraser
@date: 2019-03-31
@organization: MLSB API
@summary: Tests the importing of team csv
'''
from sqlalchemy import func
from datetime import date
from base64 import b64encode
from api.model import Team
from api.advanced.import_team import parse_lines, BACKGROUND, HEADERS,\
INVALID_ROW, extract_player_information,\
extract_players,\
extract_column_indices_lookup,\
extract_background, TeamList
from api.test.BaseTest import TestSetup, ADMIN, PASSWORD
from api.errors import InvalidField, SponsorDoesNotExist, LeagueDoesNotExist
from api.test.importer.testImportMockSession import TestImportMockSession
# HTTP Basic-auth header used for admin API requests throughout the tests.
headers = {
    'Authorization': 'Basic %s' % b64encode(bytes(ADMIN + ':' +
                                                  PASSWORD, "utf-8")
                                            ).decode("ascii")
}
# League years used by tests that need a valid / clearly-invalid year.
VALID_YEAR = date.today().year
INVALID_YEAR = 100
class TestTeamImportParseLines(TestSetup):
    """Tests for parse_lines, which splits a team csv into background,
    header and player rows."""

    def testParseLines(self):
        """Test a valid file in the standard format"""
        sponsor = "Test Import Sponsor"
        color = "Blue"
        captain = "Test Captain"
        league = "Test Import League"
        lines = ["{}:,{},".format(BACKGROUND['sponsor_name'], sponsor),
                 "{}:,{},".format(BACKGROUND['team_color'], color),
                 "{}:,{},".format(BACKGROUND['captain_name'], captain),
                 "{}:,{},".format(BACKGROUND['league_name'], league),
                 "{},{},{}".format(HEADERS['name'],
                                   HEADERS['email'],
                                   HEADERS['gender']),
                 "Test Captain,testcaptainimport@mlsb.ca,M",
                 "Test Girl,testgirlimport@mlsb.ca,F",
                 "Test Boy,testboyimport@mlsb.ca,M"]
        # parse the lines
        result = parse_lines(lines)
        # expecting no warnings
        self.assertEqual(result['warnings'], [], "Expected no warnings")
        # check background
        expected_background = {'sponsor': sponsor,
                               'color': color,
                               'captain': captain,
                               'league': league}
        error = "Failed parsing background"
        self.output(result['background'])
        self.output(expected_background)
        self.assertEqual(result['background'], expected_background, error)
        # check header
        expected_header = [HEADERS['name'],
                           HEADERS['email'],
                           HEADERS['gender']]
        error = "Failed parsing header"
        self.output(result['header'])
        self.output(expected_header)
        self.assertEqual(result['header'], expected_header, error)
        # check the players (last three lines of the fixture)
        expected_players = [player.split(",") for player in lines[-3:]]
        self.assertEqual(result['players'],
                         expected_players,
                         "Players not returned")

    def testParseLinesOrder(self):
        """Test that the order of a valid file does not matter"""
        sponsor = "Test Import Sponsor"
        color = "Blue"
        captain = "Test Captain"
        league = "Test Import League"
        # background, header and player rows are deliberately interleaved
        lines = [
            "{},{},{}".format(HEADERS['name'],
                              HEADERS['email'],
                              HEADERS['gender']),
            "{}:,{},".format(BACKGROUND['league_name'], league),
            "Test Captain,testcaptainimport@mlsb.ca,M",
            "{}:,{},".format(BACKGROUND['captain_name'], captain),
            "Test Girl,testgirlimport@mlsb.ca,F",
            "{}:,{},".format(BACKGROUND['team_color'], color),
            "Test Boy,testboyimport@mlsb.ca,M",
            "{}:,{},".format(BACKGROUND['sponsor_name'], sponsor)
        ]
        # parse the lines
        result = parse_lines(lines)
        # expecting no warnings
        self.assertEqual(result['warnings'], [], "Expected no warnings")
        # check background
        expected_background = {'sponsor': sponsor,
                               'color': color,
                               'captain': captain,
                               'league': league}
        error = "Failed parsing background"
        self.output(result['background'])
        self.output(expected_background)
        self.assertEqual(result['background'], expected_background, error)
        # check header
        expected_header = [HEADERS['name'],
                           HEADERS['email'],
                           HEADERS['gender']]
        error = "Failed parsing header"
        self.output(result['header'])
        self.output(expected_header)
        self.assertEqual(result['header'], expected_header, error)
        # check the players
        expected_players = [["Test Captain", "testcaptainimport@mlsb.ca", "M"],
                            ["Test Girl", "testgirlimport@mlsb.ca", "F"],
                            ["Test Boy", "testboyimport@mlsb.ca", "M"]]
        self.assertEqual(result['players'],
                         expected_players,
                         "Players not returned")

    def testParseLinesDelimiter(self):
        """Test using a different delimiter"""
        sponsor = "Test Import Sponsor"
        color = "Blue"
        captain = "Test Captain"
        league = "Test Import League"
        lines = ["{}:|{}|".format(BACKGROUND['sponsor_name'], sponsor),
                 "{}:|{}|".format(BACKGROUND['team_color'], color),
                 "{}:|{}|".format(BACKGROUND['captain_name'], captain),
                 "{}:|{}|".format(BACKGROUND['league_name'], league),
                 "{}|{}|{}".format(HEADERS['name'],
                                   HEADERS['email'],
                                   HEADERS['gender']),
                 "Test Captain|testcaptainimport@mlsb.ca|M",
                 "Test Girl|testgirlimport@mlsb.ca|F",
                 "Test Boy|testboyimport@mlsb.ca|M"]
        # parse the lines with the non-default delimiter
        result = parse_lines(lines, delimiter="|")
        # expecting no warnings
        self.assertEqual(result['warnings'], [], "Expected no warnings")
        # check background
        expected_background = {'sponsor': sponsor,
                               'color': color,
                               'captain': captain,
                               'league': league}
        error = "Failed parsing background"
        self.output(result['background'])
        self.output(expected_background)
        self.assertEqual(result['background'], expected_background, error)
        # check header
        expected_header = [HEADERS['name'],
                           HEADERS['email'],
                           HEADERS['gender']]
        error = "Failed parsing header"
        self.output(result['header'])
        self.output(expected_header)
        self.assertEqual(result['header'], expected_header, error)
        # check the players
        expected_players = [player.split("|") for player in lines[-3:]]
        self.assertEqual(result['players'],
                         expected_players,
                         "Players not returned")

    def testParseLinesWarnings(self):
        """Test that unparseable rows are reported as warnings"""
        sponsor = "Test Import Sponsor"
        color = "Blue"
        captain = "Test Captain"
        league = "Test Import League"
        # three invalid "WARNING,WARNING" rows are scattered through the file
        lines = ["{}:,{},".format(BACKGROUND['sponsor_name'], sponsor),
                 "{}:,{},".format(BACKGROUND['team_color'], color),
                 "WARNING,WARNING",
                 "{}:,{},".format(BACKGROUND['captain_name'], captain),
                 "{}:,{},".format(BACKGROUND['league_name'], league),
                 "{},{},{}".format(HEADERS['name'],
                                   HEADERS['email'],
                                   HEADERS['gender']),
                 "WARNING,WARNING",
                 "Test Captain,testcaptainimport@mlsb.ca,M",
                 "Test Girl,testgirlimport@mlsb.ca,F",
                 "WARNING,WARNING",
                 "Test Boy,testboyimport@mlsb.ca,M"]
        # parse the lines
        result = parse_lines(lines)
        # check that there are three warnings, one per invalid row
        expected_warnings = [INVALID_ROW.format("WARNING,WARNING"),
                             INVALID_ROW.format("WARNING,WARNING"),
                             INVALID_ROW.format("WARNING,WARNING")]
        self.output(result['warnings'])
        self.output(expected_warnings)
        self.assertEqual(result['warnings'],
                         expected_warnings,
                         "Warnings were not returned")
        # check background
        expected_background = {'sponsor': sponsor,
                               'color': color,
                               'captain': captain,
                               'league': league}
        error = "Failed parsing background"
        self.output(result['background'])
        self.output(expected_background)
        self.assertEqual(result['background'], expected_background, error)
        # check header
        expected_header = [HEADERS['name'],
                           HEADERS['email'],
                           HEADERS['gender']]
        error = "Failed parsing header"
        self.output(result['header'])
        self.output(expected_header)
        self.assertEqual(result['header'], expected_header, error)
        # check the players
        expected_players = [["Test Captain", "testcaptainimport@mlsb.ca", "M"],
                            ["Test Girl", "testgirlimport@mlsb.ca", "F"],
                            ["Test Boy", "testboyimport@mlsb.ca", "M"]]
        self.assertEqual(result['players'],
                         expected_players,
                         "Players not returned")
class TestTeamImportExtracingFunction(TestSetup):
    """Tests for the extract_* helpers (player info, player lists and the
    header-to-column-index lookup)."""

    def testExtractPlayerInformation(self):
        """Test extract player information"""
        # the test data
        name = "Test Import Parse PlayerCaptain"
        email = "testImportParsePlayer@mlsb.ca"
        gender = "M"
        info = [name, email, gender]
        # parse the information using the lookup
        lookup = {"email": 1, "name": 0, "gender": 2}
        result = extract_player_information(info, lookup)
        # expecting the player to not be found but data parsed
        self.assertEqual(result['player_id'],
                         None,
                         "Player id set for non-existent player")
        self.assertEqual(result['name'],
                         name,
                         "Player name was not extracted")
        self.assertEqual(result['email'],
                         email,
                         "Player email was not extracted")
        self.assertEqual(result['gender'],
                         gender,
                         "Player gender was not extracted")
        # now again with player in database
        player = self.add_player(name, email, gender, "", True)
        result = extract_player_information(info, lookup)
        # expecting the player to be found and the data parsed
        self.assertEqual(result['player_id'],
                         player['player_id'],
                         "Player id not set for existing player")
        self.assertEqual(result['name'],
                         name,
                         "Player name was not extracted")
        self.assertEqual(result['email'],
                         email,
                         "Player email was not extracted")
        self.assertEqual(result['gender'],
                         gender,
                         "Player gender was not extracted")

    def testExtractPlayers(self):
        """Test extracting a list of players"""
        # player data to extract
        player_one = {'name': "p1",
                      'email': "testImportPlayersOne@mlsb.ca",
                      'gender': "M"}
        player_two = {'name': "p2",
                      'email': "testImportPlayersTwo@mlsb.ca",
                      'gender': "F"}
        players = [[player_one['email'],
                    player_one['name'],
                    player_one['gender']],
                   [player_two['email'],
                    player_two['name'],
                    player_two['gender']]]
        # extract the two players
        lookup = {"email": 0, "name": 1, "gender": 2}
        result = extract_players(players, lookup)
        # should have two players
        self.assertEqual(len(result['player_info']),
                         2,
                         "Some player was not extracted")
        # should have no warnings
        self.assertEqual(len(result['warnings']),
                         0,
                         "Unexpected wanring when extracting players")
        # check player one
        self.assertEqual(result['player_info'][0]['player_id'],
                         None,
                         "Player id set for non-existent player")
        self.assertEqual(result['player_info'][0]['name'],
                         player_one['name'],
                         "Player name was not extracted")
        self.assertEqual(result['player_info'][0]['email'],
                         player_one['email'],
                         "Player email was not extracted")
        self.assertEqual(result['player_info'][0]['name'],
                         player_one['name'],
                         "Player name was not parsed")
        # check player two
        self.assertEqual(result['player_info'][1]['player_id'],
                         None,
                         "Player id set for non-existent player")
        self.assertEqual(result['player_info'][1]['name'],
                         player_two['name'],
                         "Player name was not extracted")
        self.assertEqual(result['player_info'][1]['email'],
                         player_two['email'],
                         "Player email was not extracted")
        self.assertEqual(result['player_info'][1]['name'],
                         player_two['name'],
                         "Player name was not parsed")

    def testExtractPlayersWarnings(self):
        """Test extract list of players that have warnings"""
        # player data to extract: "ex. " name and an extra column are invalid
        player_one = {'name': "ex. p1",
                      'email': "testImportPlayersOne@mlsb.ca",
                      'gender': "M"}
        player_two = {'name': "p2",
                      'email': "testImportPlayersTwo@mlsb.ca",
                      'gender': "F"}
        players = [[player_one['email'],
                    player_one['name'],
                    player_one['gender']],
                   [player_two['email'],
                    player_two['name'],
                    player_two['gender'],
                    "Extra Row"]]
        # extract the two players
        lookup = {"email": 0, "name": 1, "gender": 2}
        result = extract_players(players, lookup)
        # neither invalid row should yield a player
        self.assertEqual(len(result['player_info']),
                         0,
                         "Some player was not extracted")
        # should have one warning per invalid row
        self.assertEqual(len(result['warnings']),
                         2,
                         "Unexpected wanring when extracting players")

    def testExtractColumnIndicesLookup(self):
        """Test extracting the lookup for fields to columns indices"""
        # simple working example; header matching is case-insensitive
        header = ["Email", "name", "GeNdEr"]
        lookup = extract_column_indices_lookup(header)
        self.assertEqual(0, lookup['email'], "Did not extract email header")
        self.assertEqual(1, lookup['name'], "Did not extract name header")
        self.assertEqual(2, lookup['gender'], "Did not extract gender header")
        # each missing required column should raise InvalidField
        try:
            header = ["Email", "name"]
            lookup = extract_column_indices_lookup(header)
            self.assertTrue(False, "Should have raised exception")
        except InvalidField:
            pass
        try:
            header = ["Email", "gender"]
            lookup = extract_column_indices_lookup(header)
            self.assertTrue(False, "Should have raised exception")
        except InvalidField:
            pass
        try:
            header = ["name", "gender"]
            lookup = extract_column_indices_lookup(header)
            self.assertTrue(False, "Should have raised exception")
        except InvalidField:
            pass
class TestTeamImportExtractBackground(TestSetup):
    """Tests for extract_background - resolving the csv background section
    (sponsor, color, captain, league) against the database."""

    def testExtractBackgroundErrors(self):
        """Test that errors are raised for incomplete background """
        # some data to use throughout the test
        sponsor = "TTIEB Non-existent sponsor"
        color = "Some Color"
        captain = "TTIEB Non-existent player"
        league = "TTIEB Non-existent league"
        # missing background values
        try:
            extract_background({})
            self.assertTrue(False, "Expecting exception raised")
        except InvalidField:
            pass
        # given league example ("ex. " placeholder left in the template)
        background = {'sponsor': sponsor,
                      'color': color,
                      'captain': captain,
                      'league': "ex. League Example"}
        try:
            extract_background(background)
            self.assertTrue(False, "Expecting exception raised")
        except InvalidField:
            pass
        # given captain example
        background = {'sponsor': sponsor,
                      'color': color,
                      'captain': "ex. captain",
                      'league': league}
        try:
            extract_background(background)
            self.assertTrue(False, "Expecting exception raised")
        except InvalidField:
            pass
        # given color example
        background = {'sponsor': sponsor,
                      'color': "ex. color",
                      'captain': captain,
                      'league': league}
        try:
            extract_background(background)
            self.assertTrue(False, "Expecting exception raised")
        except InvalidField:
            pass
        # given sponsor example
        # NOTE(review): this case repeats the color example; presumably
        # 'sponsor' was meant to be "ex. sponsor" here - confirm intent.
        background = {'sponsor': sponsor,
                      'color': "ex. color",
                      'captain': captain,
                      'league': league}
        try:
            extract_background(background)
            self.assertTrue(False, "Expecting exception raised")
        except InvalidField:
            pass

    def testExtractBackgroundCantFindSponsor(self):
        """Test extract background when cant find sponsor"""
        # some data to use throughout the test
        league = "TTIEB Non-existent league"
        self.add_league(league)
        sponsor = "TTIEB Non-existent sponsor"
        color = "Some Color"
        captain = "TTIEB Non-existent player"
        background = {'sponsor': sponsor,
                      'color': color,
                      'captain': captain,
                      'league': league}
        try:
            extract_background(background)
            self.assertTrue(False, "Expecting exception raised")
        except SponsorDoesNotExist:
            pass

    def testExtractBackgroundCantFindLeague(self):
        """ Test extract background when cant find league"""
        # some data to use throughout the test
        league = "TTIEB Non-existent league"
        sponsor = "TTIEB Non-existent sponsor"
        self.add_sponsor(sponsor)
        color = "Some Color"
        captain = "TTIEB Non-existent player"
        background = {'sponsor': sponsor,
                      'color': color,
                      'captain': captain,
                      'league': league}
        try:
            extract_background(background)
            self.assertTrue(False, "Expecting exception raised")
        except LeagueDoesNotExist:
            pass

    def testExtractBackgroundNewTeam(self):
        """Test extract background for a new team"""
        # some data to use throughout the test
        league = "TTIEB Non-existent league"
        sponsor = "TTIEB Non-existent sponsor"
        self.add_sponsor(sponsor)
        self.add_league(league)
        color = "Some Color"
        captain = "TTIEB Non-existent player"
        background = {'sponsor': sponsor,
                      'color': color,
                      'captain': captain,
                      'league': league}
        # extract the background
        result = extract_background(background)
        # make sure the values match what was given
        self.assertEqual(result['sponsor']['sponsor_name'],
                         sponsor,
                         "Extracted wrong sponsor")
        self.assertEqual(result['league']['league_name'],
                         league,
                         "Extracted wrong league")
        self.assertEqual(result['team']['color'],
                         color,
                         "Extracted wrong color")
        self.assertEqual(result['captain']['player_name'],
                         captain,
                         "Extract wrong captain")

    def testExtractBackgroundExistingTeam(self):
        """Test extract background for an existing team"""
        # some data to use throughout the test
        league_name = "TTIEB Non-existent league"
        sponsor_name = "TTIEB Non-existent sponsor"
        color = "Some Color"
        sponsor = self.add_sponsor(sponsor_name)
        league = self.add_league(league_name)
        team = self.add_team(color, sponsor, league, date.today().year)
        captain = "TTIEB Non-existent player"
        background = {'sponsor': sponsor_name,
                      'color': color,
                      'captain': captain,
                      'league': league_name}
        # extract the background
        result = extract_background(background)
        # make sure the values match what was given
        self.assertEqual(result['sponsor']['sponsor_name'],
                         sponsor_name,
                         "Extracted wrong sponsor")
        self.assertEqual(result['league']['league_name'],
                         league_name,
                         "Extracted wrong league")
        self.assertEqual(result['team']['color'],
                         color,
                         "Extracted wrong color")
        # the pre-existing team should be matched, not a new one created
        self.assertEqual(result['team']['team_id'],
                         team["team_id"],
                         "Extracted wrong existing team")
        self.assertEqual(result['captain']['player_name'],
                         captain,
                         "Extract wrong captain")
class TestTeamImportAddTeam(TestSetup):
    """End-to-end tests importing a whole team via
    TeamList.add_team_functional."""

    def testAddTeamAlreadyExists(self):
        """Import a team that already exists"""
        # the testing lines
        sponsor = "Test Import Sponsor"
        color = "Blue"
        captain = "Test Captain"
        league = "Test Import League"
        lines = ["{}:,{},".format(BACKGROUND['sponsor_name'], sponsor),
                 "{}:,{},".format(BACKGROUND['team_color'], color),
                 "{}:,{},".format(BACKGROUND['captain_name'], captain),
                 "{}:,{},".format(BACKGROUND['league_name'], league),
                 "{},{},{}".format(HEADERS['name'],
                                   HEADERS['email'],
                                   HEADERS['gender']),
                 "Test Captain,testcaptainimport@mlsb.ca,M",
                 "Test Girl,testgirlimport@mlsb.ca,F",
                 "Test Boy,testboyimport@mlsb.ca,M"]
        # added the needed background
        sponsor = self.add_sponsor(sponsor)
        league = self.add_league(league)
        team = self.add_team(color, sponsor, league, date.today().year)
        # import the test team
        importer = TeamList(lines, session=TestImportMockSession(self))
        importer.add_team_functional()
        self.assertEqual(importer.warnings, [], "Importing team gave warnings")
        # the existing team should have received all three players
        team = Team.query.get(team['team_id'])
        self.assertEqual(len(team.players),
                         3,
                         "Importing team players were not created")

    def testAddTeam(self):
        """Import a team that does not exist yet"""
        # the testing lines
        sponsor = "Test Import Sponsor"
        color = "Blue"
        captain = "Test Captain"
        league = "Test Import League"
        lines = ["{}:,{},".format(BACKGROUND['sponsor_name'], sponsor),
                 "{}:,{},".format(BACKGROUND['team_color'], color),
                 "{}:,{},".format(BACKGROUND['captain_name'], captain),
                 "{}:,{},".format(BACKGROUND['league_name'], league),
                 "{},{},{}".format(HEADERS['name'],
                                   HEADERS['email'],
                                   HEADERS['gender']),
                 "Test Captain,testcaptainimport@mlsb.ca,M",
                 "Test Girl,testgirlimport@mlsb.ca,F",
                 "Test Boy,testboyimport@mlsb.ca,M"]
        # added the needed background (no team is created beforehand)
        sponsor = self.add_sponsor(sponsor)
        league = self.add_league(league)
        # import the test team
        importer = TeamList(lines, session=TestImportMockSession(self))
        importer.add_team_functional()
        self.assertEqual(importer.warnings, [], "Importing team gave warnings")
        # a new team matching the background should have been created
        teams = (Team.query
                 .filter(func.lower(Team.color) == func.lower(color))
                 .filter(Team.sponsor_id == sponsor['sponsor_id'])
                 .filter(Team.year == date.today().year)).all()
        self.assertTrue(len(teams) > 0, "Import team was not created")
        team = teams[0]
        self.assertEqual(len(team.players),
                         3,
                         "Importing team players were not created")

    def testAddTeamPlayerAlreadyExists(self):
        """Import a team where one player already exists"""
        # the testing lines
        sponsor = "Test Import Sponsor"
        color = "Blue"
        captain = "Test Captain"
        league = "Test Import League"
        player_email = "testgirlimport@mlsb.ca"
        player_name = "Test Girl"
        player_gender = "F"
        lines = ["{}:,{},".format(BACKGROUND['sponsor_name'], sponsor),
                 "{}:,{},".format(BACKGROUND['team_color'], color),
                 "{}:,{},".format(BACKGROUND['captain_name'], captain),
                 "{}:,{},".format(BACKGROUND['league_name'], league),
                 "{},{},{}".format(HEADERS['name'],
                                   HEADERS['email'],
                                   HEADERS['gender']),
                 "Test Captain,testcaptainimport@mlsb.ca,M",
                 "{},{},{}".format(player_name, player_email, player_gender)]
        # added the needed background, including the pre-existing player
        sponsor = self.add_sponsor(sponsor)
        league = self.add_league(league)
        player = self.add_player(player_name,
                                 player_email,
                                 gender=player_gender)
        # import the test team
        importer = TeamList(lines, session=TestImportMockSession(self))
        importer.add_team_functional()
        self.assertEqual(importer.warnings, [], "Importing team gave warnings")
        teams = (Team.query
                 .filter(func.lower(Team.color) == func.lower(color))
                 .filter(Team.sponsor_id == sponsor['sponsor_id'])
                 .filter(Team.year == date.today().year)).all()
        self.assertTrue(len(teams) > 0, "Import team was not created")
        team = teams[0]
        self.assertEqual(len(team.players),
                         2,
                         "Importing team players were not created")
        # the existing player should be reused, not duplicated
        player_ids = [p.id for p in team.players]
        self.assertTrue(player['player_id'] in player_ids,
                        "Import team existing player not added")
| 40.580552
| 79
| 0.525787
| 2,482
| 27,960
| 5.842063
| 0.091459
| 0.055862
| 0.059379
| 0.017448
| 0.809517
| 0.783379
| 0.765931
| 0.742483
| 0.735103
| 0.728276
| 0
| 0.003068
| 0.358906
| 27,960
| 688
| 80
| 40.639535
| 0.805858
| 0.07897
| 0
| 0.777567
| 0
| 0
| 0.226831
| 0.034368
| 0
| 0
| 0
| 0
| 0.127376
| 1
| 0.030418
| false
| 0.022814
| 0.138783
| 0
| 0.176806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f2910f6201bfc6dc468a16bea0ce66dd77ab1c88
| 5,443
|
py
|
Python
|
lib/bes/text/sentence_lexer.py
|
reconstruir/bes
|
82ff54b2dadcaef6849d7de424787f1dedace85c
|
[
"Apache-2.0"
] | null | null | null |
lib/bes/text/sentence_lexer.py
|
reconstruir/bes
|
82ff54b2dadcaef6849d7de424787f1dedace85c
|
[
"Apache-2.0"
] | null | null | null |
lib/bes/text/sentence_lexer.py
|
reconstruir/bes
|
82ff54b2dadcaef6849d7de424787f1dedace85c
|
[
"Apache-2.0"
] | null | null | null |
#-*- coding:utf-8; mode:python; indent-tabs-mode: nil; c-basic-offset: 2; tab-width: 2 -*-
import string
from .string_lexer import *
from .lexer_token import lexer_token
class sentence_lexer(string_lexer):
  """Lexer that splits a sentence into string, space, quoted-string,
  comment and punctuation tokens."""

  TOKEN_PUNCTUATION = 'punctuation'

  # Every punctuation character except '_' and '#' (the comment char).
  _PUNCTUATION_CHARS = [ch for ch in string.punctuation if ch not in ('_', '#')]

  def __init__(self, log_tag, options):
    super(sentence_lexer, self).__init__(log_tag, options)
    # Build the state machine and start in the BEGIN state.
    self.STATE_BEGIN = _state_begin(self)
    self.STATE_DONE = string_lexer_state_done(self)
    self.STATE_STRING = _state_string(self)
    self.STATE_SPACE = _state_space(self)
    self.STATE_SINGLE_QUOTED_STRING = string_lexer_state_single_quoted_string(self)
    self.STATE_DOUBLE_QUOTED_STRING = string_lexer_state_double_quoted_string(self)
    self.STATE_COMMENT = string_lexer_state_comment(self)
    self.state = self.STATE_BEGIN

  @classmethod
  def tokenize(clazz, text, log_tag = 'sentence_lexer', options = None):
    """Tokenize text and return the resulting token sequence."""
    lexer = clazz(log_tag, options)
    return lexer._run(text)

  def make_token_punctuation(self, c):
    """Build a punctuation token for c at the current position."""
    return lexer_token(self.TOKEN_PUNCTUATION, c, self.position)

  @classmethod
  def is_punctuation(clazz, c):
    """True if c is a punctuation character recognized by this lexer."""
    return c in clazz._PUNCTUATION_CHARS
class _state_begin(string_lexer_state):
  """Initial state: routes the first character of a token to the
  appropriate lexer state."""

  def __init__(self, lexer):
    super(_state_begin, self).__init__(lexer)

  def handle_char(self, c):
    self.log_handle_char(c)
    lexer = self.lexer
    emitted = []
    # The order of these checks matters: escaping wins over everything,
    # and punctuation is only considered after quotes and comments.
    if lexer.is_escaping:
      lexer.buffer_reset(c)
      next_state = lexer.STATE_STRING
    elif c.isspace():
      lexer.buffer_reset(c)
      next_state = lexer.STATE_SPACE
    elif c == lexer.COMMENT_CHAR and not lexer.ignore_comments:
      lexer.buffer_reset(c)
      next_state = lexer.STATE_COMMENT
    elif c == lexer.EOS:
      next_state = lexer.STATE_DONE
    elif c == lexer.SINGLE_QUOTE_CHAR:
      lexer.buffer_reset_with_quote(c)
      next_state = lexer.STATE_SINGLE_QUOTED_STRING
    elif c == lexer.DOUBLE_QUOTE_CHAR:
      lexer.buffer_reset_with_quote(c)
      next_state = lexer.STATE_DOUBLE_QUOTED_STRING
    elif lexer.is_punctuation(c):
      # Punctuation is emitted immediately as its own token.
      emitted.append(lexer.make_token_punctuation(c))
      next_state = lexer.STATE_BEGIN
    else:
      lexer.buffer_reset(c)
      next_state = lexer.STATE_STRING
    lexer.change_state(next_state, c)
    return emitted
class _state_space(string_lexer_state):
  """State entered while consuming a run of whitespace; emits the pending
  space token before handing off to the next state."""

  def __init__(self, lexer):
    super(_state_space, self).__init__(lexer)

  def handle_char(self, c):
    self.log_handle_char(c)
    lexer = self.lexer
    emitted = []
    if c.isspace():
      # Still inside the whitespace run; keep accumulating.
      lexer.buffer_write(c)
      next_state = lexer.STATE_SPACE
    elif not lexer.is_escaping and (c == lexer.COMMENT_CHAR and not lexer.ignore_comments):
      emitted.append(lexer.make_token_space())
      lexer.buffer_reset(c)
      next_state = lexer.STATE_COMMENT
    elif c == lexer.EOS:
      emitted.append(lexer.make_token_space())
      next_state = lexer.STATE_DONE
    elif not lexer.is_escaping and c == lexer.SINGLE_QUOTE_CHAR:
      emitted.append(lexer.make_token_space())
      lexer.buffer_reset_with_quote(c)
      next_state = lexer.STATE_SINGLE_QUOTED_STRING
    elif not lexer.is_escaping and c == lexer.DOUBLE_QUOTE_CHAR:
      emitted.append(lexer.make_token_space())
      lexer.buffer_reset_with_quote(c)
      next_state = lexer.STATE_DOUBLE_QUOTED_STRING
    elif not lexer.is_escaping and lexer.is_punctuation(c):
      # Flush the space token, then emit the punctuation token directly.
      emitted.append(lexer.make_token_space())
      emitted.append(lexer.make_token_punctuation(c))
      lexer.buffer_reset()
      next_state = lexer.STATE_BEGIN
    else:
      emitted.append(lexer.make_token_space())
      lexer.buffer_reset(c)
      next_state = lexer.STATE_STRING
    lexer.change_state(next_state, c)
    return emitted
class _state_string(string_lexer_state):
  """State entered while consuming an unquoted string; emits the pending
  string token before handing off to the next state."""

  def __init__(self, lexer):
    super(_state_string, self).__init__(lexer)

  def handle_char(self, c):
    self.log_handle_char(c)
    lexer = self.lexer
    emitted = []
    if c == lexer.EOS:
      emitted.append(lexer.make_token_string())
      next_state = lexer.STATE_DONE
    elif lexer.is_escaping:
      # An escaped character is taken literally as part of the string.
      lexer.buffer_write(c)
      next_state = lexer.STATE_STRING
    elif c.isspace():
      emitted.append(lexer.make_token_string())
      lexer.buffer_reset(c)
      next_state = lexer.STATE_SPACE
    elif c == lexer.COMMENT_CHAR and not lexer.ignore_comments:
      emitted.append(lexer.make_token_string())
      lexer.buffer_reset(c)
      next_state = lexer.STATE_COMMENT
    elif c == lexer.SINGLE_QUOTE_CHAR:
      # A quote inside a string continues the same string token.
      lexer.buffer_write_quote(c)
      next_state = lexer.STATE_SINGLE_QUOTED_STRING
    elif c == lexer.DOUBLE_QUOTE_CHAR:
      lexer.buffer_write_quote(c)
      next_state = lexer.STATE_DOUBLE_QUOTED_STRING
    elif not lexer.is_escaping and lexer.is_punctuation(c):
      # Flush the string token, then emit the punctuation token directly.
      emitted.append(lexer.make_token_string())
      emitted.append(lexer.make_token_punctuation(c))
      lexer.buffer_reset()
      next_state = lexer.STATE_BEGIN
    else:
      lexer.buffer_write(c)
      next_state = lexer.STATE_STRING
    lexer.change_state(next_state, c)
    return emitted
| 36.530201
| 106
| 0.721845
| 797
| 5,443
| 4.595985
| 0.089084
| 0.211302
| 0.075348
| 0.106743
| 0.791428
| 0.757303
| 0.754027
| 0.715261
| 0.71062
| 0.676222
| 0
| 0.000669
| 0.176006
| 5,443
| 148
| 107
| 36.777027
| 0.816054
| 0.016351
| 0
| 0.72093
| 0
| 0
| 0.005044
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.077519
| false
| 0
| 0.023256
| 0.023256
| 0.193798
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4b2f818523afa1ef2b4266c551d0bc54125265fc
| 47,657
|
py
|
Python
|
evaluations/evaluate_for_all_datasets.py
|
MoritzWillig/flowbias
|
d08e1d8cd250ed147060d374f648e39a23ef16f5
|
[
"Apache-2.0"
] | null | null | null |
evaluations/evaluate_for_all_datasets.py
|
MoritzWillig/flowbias
|
d08e1d8cd250ed147060d374f648e39a23ef16f5
|
[
"Apache-2.0"
] | null | null | null |
evaluations/evaluate_for_all_datasets.py
|
MoritzWillig/flowbias
|
d08e1d8cd250ed147060d374f648e39a23ef16f5
|
[
"Apache-2.0"
] | null | null | null |
import json
import sys
import os
from argparse import Namespace
from datetime import datetime
import torch
import torch.utils.data as data
import numpy as np
from flowbias.datasets.flyingchairs import FlyingChairsTrain, FlyingChairsValid, FlyingChairsFull
from flowbias.datasets.flyingThings3D import FlyingThings3dCleanTrain, FlyingThings3dCleanValid
from flowbias.datasets.kitti_combined import KittiComb2015Train, KittiComb2015Val, KittiComb2015Test
from flowbias.datasets.sintel import SintelTrainingCleanTrain, SintelTrainingCleanValid, SintelTrainingCleanFull, \
SintelTrainingFinalTrain, SintelTrainingFinalValid, SintelTrainingFinalFull
from flowbias.datasets.middlebury import MiddleburyTrainValid
from flowbias.models import PWCNet, FlowNet1S, PWCNetConv33Fusion, PWCNetX1Zero, PWCNetWOX1Connection, \
CTSKPWCExpertNet02, CTSKPWCExpertNetAdd01, PWCNetDSEncoder, PWCNetWOX1ConnectionExt, CTSPWCExpertNetAdd01, \
CTSKPWCExpertNet02WOX1, CTSKPWCExpertNetWOX1Add01, CTSPWCExpertNetWOX1Add01,\
CTSKPWCExpertNetWOX1Add01EncoderExpertsOnly, CTSKPWCExpertNetWOX1LinAdd01, PWCNetResidualFlow, \
PWCNetWOX1SecondaryFlow
from flowbias.utils.meta_infrastructure import get_available_datasets, dataset_needs_batch_size_one
from flowbias.utils.model_loading import load_model_parameters, sample_to_torch_batch
from flowbias.losses import MultiScaleEPE_PWC, MultiScaleEPE_FlowNet, MultiScaleSparseEPE_PWC, MultiScaleSparseEPE_FlowNet
from flowbias.utils.statistics import SeriesStatistic
from torch.utils.data.dataloader import DataLoader
"""
Computes the average epe of a model for all datasets.
evaluate_for_all_datasets /path_to/model_checkpoint.ckpt networkName
"""
def contains_nan(results):
    """Recursively check an evaluation-results structure for NaN values.

    Args:
        results: either a list of numbers, a dict whose values are
            themselves results structures, or a scalar.

    Returns:
        True if any NaN is found, False otherwise.  The previous version
        implicitly returned None for scalar inputs; an explicit False keeps
        the same falsy behavior while being unambiguous.
    """
    if isinstance(results, list):
        return bool(np.isnan(results).any())
    if isinstance(results, dict):
        # Recurse into every value of the dict.
        return any(contains_nan(val) for val in results.values())
    return False
class DataEnricher(data.Dataset):
    """Dataset wrapper that merges a fixed dict of extra entries into every
    sample; extra entries override duplicate keys of the sample."""

    def __init__(self, dataset, additional):
        self._dataset = dataset
        self._additional = additional

    def __getitem__(self, index):
        # Copy the underlying sample, then overlay the extra entries.
        sample = dict(self._dataset[index])
        sample.update(self._additional)
        return sample

    def __len__(self):
        return len(self._dataset)
class CTSKDatasetDetector(DataEnricher):
    """Enricher that tags every sample with the id of its source dataset.

    The ids match the dataset indices used by the CTSKTrain CombinedDataset
    and CTSKTrainDatasetBatchSampler; -1 marks datasets unknown to CTSK.
    """

    # (dataset class, dataset id) pairs checked via isinstance.
    _known_datasets = [
        [FlyingChairsTrain, 0],
        [FlyingChairsValid, 0],
        [FlyingChairsFull, 0],
        [FlyingThings3dCleanTrain, 1],
        [FlyingThings3dCleanValid, 1],
        [SintelTrainingCleanTrain, 2],
        [SintelTrainingCleanValid, 2],
        [SintelTrainingCleanFull, 2],
        [SintelTrainingFinalTrain, 2],
        [SintelTrainingFinalValid, 2],
        [SintelTrainingFinalFull, 2],
        [KittiComb2015Train, 3],
        [KittiComb2015Val, 3],
        [KittiComb2015Test, 3],
        [MiddleburyTrainValid, -1]
    ]

    def _detect_dataset_id(self, dataset):
        # Default to -1 ("unknown") when no known dataset class matches.
        detected = -1
        for dataset_class, dataset_id in CTSKDatasetDetector._known_datasets:
            if isinstance(dataset, dataset_class):
                detected = dataset_id
        return detected

    def __init__(self, dataset, additional):
        super().__init__(dataset, {"dataset": self._detect_dataset_id(dataset), **additional})
if __name__ == '__main__':
print(datetime.now().strftime("[%d-%b-%Y (%H:%M:%S)]"), "preparing ...")
args = Namespace(**{
"batch_size": None,
"cuda": True
})
model_classes = {
"PWCNet": [PWCNet, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}],
"FlowNet1S": [FlowNet1S, {"default": MultiScaleEPE_FlowNet, "kitti2015Train": MultiScaleSparseEPE_FlowNet, "kitti2015Valid": MultiScaleSparseEPE_FlowNet, "kitti2015Test": MultiScaleSparseEPE_FlowNet, "middleburyTrain":MultiScaleSparseEPE_FlowNet}],
"PWCNetConv33Fusion": [PWCNetConv33Fusion, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}],
"PWCNetX1Zero": [PWCNetX1Zero, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}],
"PWCNetWOX1Connection": [PWCNetWOX1Connection, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}],
"PWCNetWOX1ConnectionExt": [PWCNetWOX1ConnectionExt, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}],
# CTKS expert split models
"CTSKPWCExpertNet02Known": [CTSKPWCExpertNet02, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [CTSKDatasetDetector, {}]],
"CTSKPWCExpertNet02Expert0": [CTSKPWCExpertNet02, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 0}]],
"CTSKPWCExpertNet02Expert1": [CTSKPWCExpertNet02, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 1}]],
"CTSKPWCExpertNet02Expert2": [CTSKPWCExpertNet02, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 2}]],
"CTSKPWCExpertNet02Expert3": [CTSKPWCExpertNet02, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 3}]],
# CTKS expert add models
"CTSKPWCExpertNet01AddKnown": [CTSKPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [CTSKDatasetDetector, {}]],
"CTSKPWCExpertNet01AddNoExpert": [CTSKPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": -1}]],
"CTSKPWCExpertNet01AddExpert0": [CTSKPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 0}]],
"CTSKPWCExpertNet01AddExpert1": [CTSKPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 1}]],
"CTSKPWCExpertNet01AddExpert2": [CTSKPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 2}]],
"CTSKPWCExpertNet01AddExpert3": [CTSKPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 3}]],
# CTKS WOX1 expert split models
"CTSKPWCExpertNet02WOX1Known": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [CTSKDatasetDetector, {}]],
"CTSKPWCExpertNet02WOX1Expert0": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 0}]],
"CTSKPWCExpertNet02WOX1Expert1": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 1}]],
"CTSKPWCExpertNet02WOX1Expert2": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 2}]],
"CTSKPWCExpertNet02WOX1Expert3": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 3}]],
# CTKS WOX1 expert add models
"CTSKPWCExpertNet01WOX1AddKnown": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [CTSKDatasetDetector, {}]],
"CTSKPWCExpertNet01WOX1AddNoExpert": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": -1}]],
"CTSKPWCExpertNet01WOX1AddExpert0": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 0}]],
"CTSKPWCExpertNet01WOX1AddExpert1": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 1}]],
"CTSKPWCExpertNet01WOX1AddExpert2": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 2}]],
"CTSKPWCExpertNet01WOX1AddExpert3": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 3}]],
# CTKS WOX1 expert linAdd models
"CTKSPWCExpertLinAddNet01WOX1Known": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [CTSKDatasetDetector, {}]],
"CTKSPWCExpertLinAddNet01WOX1NoExpert": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": -1}]],
"CTKSPWCExpertLinAddNet01WOX1Expert0": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 0}]],
"CTKSPWCExpertLinAddNet01WOX1Expert1": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 1}]],
"CTKSPWCExpertLinAddNet01WOX1Expert2": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 2}]],
"CTKSPWCExpertLinAddNet01WOX1Expert3": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 3}]],
#CTS Expert add Models
"CTSPWCExpertNet01AddKnown": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [CTSKDatasetDetector, {}]],
"CTSPWCExpertNet01AddNoExpert": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": -1}]],
"CTSPWCExpertNet01AddExpert0": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 0}]],
"CTSPWCExpertNet01AddExpert1": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 1}]],
"CTSPWCExpertNet01AddExpert2": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 2}]],
# CTS WOX1 expert add models
"CTSPWCExpertNet01WOX1AddKnown": [CTSPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [CTSKDatasetDetector, {}]],
"CTSPWCExpertNet01WOX1AddNoExpert": [CTSPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": -1}]],
"CTSPWCExpertNet01WOX1AddExpert0": [CTSPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 0}]],
"CTSPWCExpertNet01WOX1AddExpert1": [CTSPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 1}]],
"CTSPWCExpertNet01WOX1AddExpert2": [CTSPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 2}]],
# CTKS WOX1 expert add models encoder only
"CTSKPWCExpertNetWOX1Add01EncoderExpertsOnlyKnown": [CTSKPWCExpertNetWOX1Add01EncoderExpertsOnly, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [CTSKDatasetDetector, {}]],
"CTSKPWCExpertNetWOX1Add01EncoderExpertsOnlyNoExpert": [CTSKPWCExpertNetWOX1Add01EncoderExpertsOnly, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": -1}]],
"CTSKPWCExpertNetWOX1Add01EncoderExpertsOnlyExpert0": [CTSKPWCExpertNetWOX1Add01EncoderExpertsOnly, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 0}]],
"CTSKPWCExpertNetWOX1Add01EncoderExpertsOnlyExpert1": [CTSKPWCExpertNetWOX1Add01EncoderExpertsOnly, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 1}]],
"CTSKPWCExpertNetWOX1Add01EncoderExpertsOnlyExpert2": [CTSKPWCExpertNetWOX1Add01EncoderExpertsOnly, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 2}]],
"CTSKPWCExpertNetWOX1Add01EncoderExpertsOnlyExpert3": [CTSKPWCExpertNetWOX1Add01EncoderExpertsOnly, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"dataset": 3}]],
# DS Encoder
"PWCNetDSEncoder": [PWCNetDSEncoder, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}],
# pwc secondary flow
"PWCNetWOX1SecondaryFlow": [PWCNetWOX1SecondaryFlow, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}],
# Residual Flow
"PWCNetResidualFlow": [PWCNetResidualFlow, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}],
# CTS fused expert add models
"CTSPWCExpertNet01AddExpert00": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSPWCExpertNet01AddExpert01": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSPWCExpertNet01AddExpert02": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSPWCExpertNet01AddExpert10": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSPWCExpertNet01AddExpert11": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSPWCExpertNet01AddExpert12": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSPWCExpertNet01AddExpert20": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSPWCExpertNet01AddExpert21": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSPWCExpertNet01AddExpert22": [CTSPWCExpertNetAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 2, "context_expert_id": 2}]],
#CTKS WOX1 fused expert split models
"CTSKPWCExpertNet02WOX1Expert00": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSKPWCExpertNet02WOX1Expert01": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSKPWCExpertNet02WOX1Expert02": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSKPWCExpertNet02WOX1Expert03": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 3, "context_expert_id": 3}]],
"CTSKPWCExpertNet02WOX1Expert10": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSKPWCExpertNet02WOX1Expert11": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSKPWCExpertNet02WOX1Expert12": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSKPWCExpertNet02WOX1Expert13": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 3, "context_expert_id": 3}]],
"CTSKPWCExpertNet02WOX1Expert20": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSKPWCExpertNet02WOX1Expert21": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSKPWCExpertNet02WOX1Expert22": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSKPWCExpertNet02WOX1Expert23": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 3, "context_expert_id": 3}]],
"CTSKPWCExpertNet02WOX1Expert30": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 3, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSKPWCExpertNet02WOX1Expert31": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 3, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSKPWCExpertNet02WOX1Expert32": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 3, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSKPWCExpertNet02WOX1Expert33": [CTSKPWCExpertNet02WOX1, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 3, "decoder_expert_id": 3, "context_expert_id": 3}]],
#CTKS WOX1 fused expert add models
"CTSKPWCExpertNet01WOX1AddExpert00": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSKPWCExpertNet01WOX1AddExpert01": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSKPWCExpertNet01WOX1AddExpert02": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSKPWCExpertNet01WOX1AddExpert03": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 3, "context_expert_id": 3}]],
"CTSKPWCExpertNet01WOX1AddExpert10": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSKPWCExpertNet01WOX1AddExpert11": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSKPWCExpertNet01WOX1AddExpert12": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSKPWCExpertNet01WOX1AddExpert13": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 3, "context_expert_id": 3}]],
"CTSKPWCExpertNet01WOX1AddExpert20": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSKPWCExpertNet01WOX1AddExpert21": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSKPWCExpertNet01WOX1AddExpert22": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSKPWCExpertNet01WOX1AddExpert23": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 3, "context_expert_id": 3}]],
"CTSKPWCExpertNet01WOX1AddExpert30": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 3, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSKPWCExpertNet01WOX1AddExpert31": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 3, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSKPWCExpertNet01WOX1AddExpert32": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 3, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSKPWCExpertNet01WOX1AddExpert33": [CTSKPWCExpertNetWOX1Add01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 3, "decoder_expert_id": 3, "context_expert_id": 3}]],
#CTSK WOX1 fused expert add models
"CTSKPWCExpertNet01WOX1LinAddExpert00": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSKPWCExpertNet01WOX1LinAddExpert01": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSKPWCExpertNet01WOX1LinAddExpert02": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSKPWCExpertNet01WOX1LinAddExpert03": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 0, "decoder_expert_id": 3, "context_expert_id": 3}]],
"CTSKPWCExpertNet01WOX1LinAddExpert10": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSKPWCExpertNet01WOX1LinAddExpert11": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSKPWCExpertNet01WOX1LinAddExpert12": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSKPWCExpertNet01WOX1LinAddExpert13": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 1, "decoder_expert_id": 3, "context_expert_id": 3}]],
"CTSKPWCExpertNet01WOX1LinAddExpert20": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSKPWCExpertNet01WOX1LinAddExpert21": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSKPWCExpertNet01WOX1LinAddExpert22": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSKPWCExpertNet01WOX1LinAddExpert23": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 2, "decoder_expert_id": 3, "context_expert_id": 3}]],
"CTSKPWCExpertNet01WOX1LinAddExpert30": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 3, "decoder_expert_id": 0, "context_expert_id": 0}]],
"CTSKPWCExpertNet01WOX1LinAddExpert31": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 3, "decoder_expert_id": 1, "context_expert_id": 1}]],
"CTSKPWCExpertNet01WOX1LinAddExpert32": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 3, "decoder_expert_id": 2, "context_expert_id": 2}]],
"CTSKPWCExpertNet01WOX1LinAddExpert33": [CTSKPWCExpertNetWOX1LinAdd01, {"default": MultiScaleEPE_PWC, "kitti2015Train": MultiScaleSparseEPE_PWC, "kitti2015Valid": MultiScaleSparseEPE_PWC, "kitti2015Test": MultiScaleSparseEPE_PWC, "middleburyTrain":MultiScaleSparseEPE_PWC}, [DataEnricher, {"encoder_expert_id": 3, "decoder_expert_id": 3, "context_expert_id": 3}]],
}
# Command line: <script> <checkpoint_path> <model_class_name> <result_json_path>
assert(len(sys.argv) == 4)
#model_path = "/data/dataB/models/R_PWCNet-A_fine_sintel-20191218-135407/checkpoint_latest.ckpt"
model_path = sys.argv[1]
model_class_name = sys.argv[2]
result_file_path = sys.argv[3]
print(model_path, "with", model_class_name)
# evaluation only: gradients are never needed below
with torch.no_grad():
# model_classes entries have the shape [model_class, loss_dict, optional_enricher_config]
model_class = model_classes[model_class_name][0]
# NOTE(review): `args` is defined earlier in the file (not visible here) — presumably parsed options; confirm
model = model_class(args)
load_model_parameters(model, model_path)
model.eval().cuda()
# legacy dataset names -> current names, used to migrate old result files in place
rename = {
"flyingChairs": "flyingChairsValid",
"flyingThings": "flyingThingsCleanValid",
"kitti": "kittiValid",
"kittiValid": "kitti2015Valid",
"sintelClean": "sintelCleanValid",
"sintelFinal": "sintelFinalValid",
}
# load existing results
# has_old_names tracks whether the result file must be rewritten due to renamed keys
has_old_names = False
if os.path.isfile(result_file_path):
with open(result_file_path, "r") as f:
existing_results_x = json.loads(f.read())
# rename old keys and skip non-dataset entries
existing_results = {}
for key, value in existing_results_x.items():
if key in ["model_path", "model_class_name"]:
# check if the file contains old model_class_names
if key == "model_class_name" and value not in model_class_name:
has_old_names = True
continue
if key in rename:
existing_results[rename[key]] = value
has_old_names = True
else:
existing_results[key] = value
else:
# no existing results
existing_results = {}
existing_results_datasets = list(existing_results.keys())
# compute remaining evaluations
#reevaluate = ["kitti2015Train", "kitti2015Valid"] # forces datasets to be reevaluated
#reevaluate = ["middleburyTrain"]
reevaluate = []
reevaluate_only = False
# reevaluate datasets whose stored statistics contain NaN values
reevaluate_nans = True
available_dataset_names = get_available_datasets(force_mode="test", select_by_any_tag=["train", "valid"], run_dry=True)
# a dataset is (re)evaluated when it has no stored result, its stored result
# contains NaNs (and reevaluate_nans is set), or it is listed in `reevaluate`
missing_dataset_names = [
dataset_name for dataset_name in available_dataset_names
if (((dataset_name not in existing_results_datasets) or
(reevaluate_nans and contains_nan(existing_results[dataset_name]))) and
(not reevaluate_only)) or (dataset_name in reevaluate)]
print("available_datasets:", list(available_dataset_names))
print("existing results:", list(existing_results.keys()))
print("computing results for:", missing_dataset_names)
datasets = get_available_datasets(force_mode="test", restrict_to=missing_dataset_names)
# nothing left to evaluate: still rewrite the file if keys were renamed, then stop
if len(datasets.keys()) == 0:
if has_old_names:
print("replacing old dataset or model names")
results = {"model_path": model_path, "model_class_name": model_class_name}
for key, value in existing_results.items():
results[key] = value
with open(result_file_path, "w") as f:
f.write(json.dumps(results))
print("no datasets remaining - exiting")
exit()
batch_size = 16
model_config = model_classes[model_class_name]
# run a single sample through model and default loss once, only to discover
# the names of the individual loss terms reported by the loss module
demo_available_dataset = next(iter(datasets.values()))
if len(model_config) > 2:
# wrap dataset into dataset enricher
enricherConfig = model_config[2]
demo_available_dataset = enricherConfig[0](demo_available_dataset, enricherConfig[1])
demo_sample = sample_to_torch_batch(demo_available_dataset[0])
demo_loss = model_classes[model_class_name][1]["default"](args).eval().cuda()
print("!!!!", model_class_name, demo_loss, model(demo_sample).keys(), demo_sample.keys())
demo_loss_values = demo_loss(model(demo_sample), demo_sample)
loss_names = list(demo_loss_values.keys())
results = {"model_path": model_path, "model_class_name": model_class_name}
# evaluate every remaining dataset
for name, dataset in datasets.items():
print(datetime.now().strftime("[%d-%b-%Y (%H:%M:%S)]"), name)
# per-dataset loss override, falling back to the "default" loss class
loss_class = model_config[1][name] if name in model_config[1] else model_config[1]["default"]
loss = loss_class(args).eval().cuda()
if len(model_config) > 2:
# wrap dataset into dataset enricher
enricherConfig = model_config[2]
dataset = enricherConfig[0](dataset, enricherConfig[1])
# one running statistic per loss term; the comprehension variable reuses
# `name` but does not leak out of the comprehension in Python 3
losses = {name: SeriesStatistic() for name in loss_names}
dataset_size = len(dataset)
i = 0
gpuargs = {"num_workers": 4, "pin_memory": False}
loader = DataLoader(
dataset,
batch_size=1 if dataset_needs_batch_size_one(name, force_mode="test") else batch_size,
shuffle=False,
drop_last=False,
**gpuargs)
#for i in range(len(dataset)):
for sample in loader:
# move every "input*"/"target*" tensor of the batch onto the GPU
input_keys = list(filter(lambda x: "input" in x, sample.keys()))
target_keys = list(filter(lambda x: "target" in x, sample.keys()))
tensor_keys = input_keys + target_keys
for key, value in sample.items():
if key in tensor_keys:
sample[key] = value.cuda(non_blocking=True)
loss_values = loss(model(sample), sample)
for lname, value in loss_values.items():
# weight each pushed loss value by the batch size b of "target1"
# (assumes target1 is a 4-D batch tensor — TODO confirm)
b, _, _, _ = sample["target1"].size()
losses[lname].push_value(value.cpu().detach().numpy(), int(b))
#time.sleep(0.003)
#i += 1
#if i+1 % 10 == 0:
# sys.stdout.write(f"\r{i}/{dataset_size}")
# sys.stdout.flush()
#sys.stdout.write("\n")
#sys.stdout.flush()
# aggregate per-loss-term statistics for this dataset
results[name] = {}
for lname, lloss in losses.items():
statistic = lloss.get_statistics(report_individual_values=True)
results[name][lname] = statistic
print(f"{lname}: {statistic['average']}")
print(datetime.now().strftime("[%d-%b-%Y (%H:%M:%S)]"), "saving ...")
# add existing results
for key, value in existing_results.items():
# but keep newer results (in case we reevaluated a dataset)
if key in results:
continue
results[key] = value
# save
with open(result_file_path, "w") as f:
f.write(json.dumps(results))
print(datetime.now().strftime("[%d-%b-%Y (%H:%M:%S)]"), "done")
| 124.106771
| 372
| 0.768114
| 3,914
| 47,657
| 9.058252
| 0.101686
| 0.271168
| 0.070711
| 0.113753
| 0.736504
| 0.726716
| 0.724855
| 0.723868
| 0.721442
| 0.721442
| 0
| 0.056669
| 0.11652
| 47,657
| 383
| 373
| 124.430809
| 0.785389
| 0.027299
| 0
| 0.081911
| 0
| 0
| 0.305762
| 0.072451
| 0
| 0
| 0
| 0
| 0.003413
| 1
| 0.020478
| false
| 0
| 0.064846
| 0.006826
| 0.112628
| 0.040956
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4b50e361ee21aa5f9cbdb786e35e65523546d009
| 9,263
|
py
|
Python
|
beanie/api/work_centre_api.py
|
altoyield/python-beanieclient
|
448b8dd328054eaf32dd7d0bdff700e603b5c27d
|
[
"Apache-2.0"
] | null | null | null |
beanie/api/work_centre_api.py
|
altoyield/python-beanieclient
|
448b8dd328054eaf32dd7d0bdff700e603b5c27d
|
[
"Apache-2.0"
] | null | null | null |
beanie/api/work_centre_api.py
|
altoyield/python-beanieclient
|
448b8dd328054eaf32dd7d0bdff700e603b5c27d
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Beanie ERP API
An API specification for interacting with the Beanie ERP system # noqa: E501
OpenAPI spec version: 0.8
Contact: dev@bean.ie
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from beanie.api_client import ApiClient
class WorkCentreApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured client so the API is usable
        # without explicit wiring.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def find_work_centre_by_id(self, id, **kwargs):  # noqa: E501
        """Find Work centre by ID  # noqa: E501

        Returns a single work centre if the user has access  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.find_work_centre_by_id(id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int id: ID of work centre to fetch (required)
        :return: WorkCentre
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.find_work_centre_by_id_with_http_info(id, **kwargs)  # noqa: E501
        else:
            (data) = self.find_work_centre_by_id_with_http_info(id, **kwargs)  # noqa: E501
            return data

    def find_work_centre_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
        """Find Work centre by ID  # noqa: E501

        Returns a single work centre if the user has access  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.find_work_centre_by_id_with_http_info(id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int id: ID of work centre to fetch (required)
        :return: WorkCentre
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method find_work_centre_by_id" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `find_work_centre_by_id`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # FIX: `async` became a reserved keyword in Python 3.7, so it cannot be
        # written as a literal keyword argument anymore. Unpacking the dict
        # passes the identical keyword to call_api on every Python version.
        return self.api_client.call_api(
            '/work_centres/{id}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='WorkCentre',  # noqa: E501
            auth_settings=auth_settings,
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats,
            **{'async': params.get('async')})

    def find_work_centres(self, work_centre_group_id, **kwargs):  # noqa: E501
        """All work centre  # noqa: E501

        Returns all work centre from the system that the user has access to  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.find_work_centres(work_centre_group_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int work_centre_group_id: ID of Work Centre Group for list of Work Centres (required)
        :param list[str] tags: tags to filter by
        :param int limit: Maximum number of results to return
        :return: list[WorkCentre]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.find_work_centres_with_http_info(work_centre_group_id, **kwargs)  # noqa: E501
        else:
            (data) = self.find_work_centres_with_http_info(work_centre_group_id, **kwargs)  # noqa: E501
            return data

    def find_work_centres_with_http_info(self, work_centre_group_id, **kwargs):  # noqa: E501
        """All work centre  # noqa: E501

        Returns all work centre from the system that the user has access to  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.find_work_centres_with_http_info(work_centre_group_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int work_centre_group_id: ID of Work Centre Group for list of Work Centres (required)
        :param list[str] tags: tags to filter by
        :param int limit: Maximum number of results to return
        :return: list[WorkCentre]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['work_centre_group_id', 'tags', 'limit']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method find_work_centres" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'work_centre_group_id' is set
        if ('work_centre_group_id' not in params or
                params['work_centre_group_id'] is None):
            raise ValueError("Missing the required parameter `work_centre_group_id` when calling `find_work_centres`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'work_centre_group_id' in params:
            path_params['work_centre_group_id'] = params['work_centre_group_id']  # noqa: E501

        query_params = []
        if 'tags' in params:
            query_params.append(('tags', params['tags']))  # noqa: E501
            collection_formats['tags'] = 'csv'  # noqa: E501
        if 'limit' in params:
            query_params.append(('limit', params['limit']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # FIX: see find_work_centre_by_id_with_http_info — `async` is a
        # reserved keyword from Python 3.7 on, so pass it via dict unpacking.
        return self.api_client.call_api(
            '/work_centre_groups/{work_centre_group_id}/work_centres', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[WorkCentre]',  # noqa: E501
            auth_settings=auth_settings,
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats,
            **{'async': params.get('async')})
| 38.27686
| 132
| 0.616971
| 1,140
| 9,263
| 4.764912
| 0.148246
| 0.069956
| 0.052467
| 0.053203
| 0.878498
| 0.82732
| 0.812224
| 0.775037
| 0.770987
| 0.739691
| 0
| 0.017734
| 0.293857
| 9,263
| 241
| 133
| 38.435685
| 0.81272
| 0.065745
| 0
| 0.655738
| 1
| 0
| 0.182511
| 0.046263
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.032787
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4b83bb6375b82dd1b74395ded80790c2f63778d5
| 7,514
|
py
|
Python
|
tests/graphlib/test_io.py
|
networmix/NetSim
|
9a2804112895826c58399fbd47f496817916cd49
|
[
"MIT"
] | null | null | null |
tests/graphlib/test_io.py
|
networmix/NetSim
|
9a2804112895826c58399fbd47f496817916cd49
|
[
"MIT"
] | 12
|
2021-11-20T22:50:46.000Z
|
2022-01-07T02:00:38.000Z
|
tests/graphlib/test_io.py
|
networmix/NetSim
|
9a2804112895826c58399fbd47f496817916cd49
|
[
"MIT"
] | null | null | null |
# pylint: disable=protected-access,invalid-name
from netsim.graphlib.io import graph_to_node_link, node_link_to_graph, edgelist_to_graph
from netsim.graphlib.graph import MultiDiGraph
def test_graph_to_node_link_1():
    """Serialize a small MultiDiGraph to node-link form and compare exactly."""
    edge_specs = [
        ("A", "B", "TEST_edge1a"),
        ("B", "A", "TEST_edge1a"),
        ("A", "B", "TEST_edge1b"),
        ("B", "A", "TEST_edge1b"),
        ("B", "C", "TEST_edge2"),
        ("C", "B", "TEST_edge2"),
        ("C", "A", "TEST_edge3"),
        ("A", "C", "TEST_edge3"),
    ]
    node_ids = ("A", "B", "C")

    g = MultiDiGraph(test_attr="TEST_graph")
    for pos, node_id in enumerate(node_ids, start=1):
        g.add_node(node_id, test_attr="TEST_node%d" % pos)
    for src, dst, attr in edge_specs:
        g.add_edge(src, dst, test_attr=attr)

    # Expected node-link form: nodes keep insertion order, links refer to
    # nodes by positional index and carry 1-based auto-assigned edge keys.
    index_of = {node_id: pos for pos, node_id in enumerate(node_ids)}
    exp_ret = {
        "graph": {"test_attr": "TEST_graph"},
        "nodes": [
            {"id": node_id, "attr": {"test_attr": "TEST_node%d" % (pos + 1)}}
            for pos, node_id in enumerate(node_ids)
        ],
        "links": [
            {
                "source": index_of[src],
                "target": index_of[dst],
                "key": key,
                "attr": {"test_attr": attr},
            }
            for key, (src, dst, attr) in enumerate(edge_specs, start=1)
        ],
    }
    assert exp_ret == graph_to_node_link(g)
def test_node_link_to_graph_1():
    """Deserialize node-link data and verify nodes plus keyed edge tuples."""
    edge_specs = [
        ("A", "B", "TEST_edge1a"),
        ("B", "A", "TEST_edge1a"),
        ("A", "B", "TEST_edge1b"),
        ("B", "A", "TEST_edge1b"),
        ("B", "C", "TEST_edge2"),
        ("C", "B", "TEST_edge2"),
        ("C", "A", "TEST_edge3"),
        ("A", "C", "TEST_edge3"),
    ]
    node_ids = ("A", "B", "C")
    index_of = {node_id: pos for pos, node_id in enumerate(node_ids)}

    data = {
        "graph": {"test_attr": "TEST_graph"},
        "nodes": [
            {"id": node_id, "attr": {"test_attr": "TEST_node%d" % (pos + 1)}}
            for pos, node_id in enumerate(node_ids)
        ],
        "links": [
            {
                "source": index_of[src],
                "target": index_of[dst],
                "key": key,
                "attr": {"test_attr": attr},
            }
            for key, (src, dst, attr) in enumerate(edge_specs, start=1)
        ],
    }

    g = node_link_to_graph(data)
    for node_id in node_ids:
        assert node_id in g
    # Internal edge storage maps each key to (src, dst, key, attr_dict).
    for key, (src, dst, attr) in enumerate(edge_specs, start=1):
        assert g._edges[key] == (src, dst, key, {"test_attr": attr})
def test_node_link_1():
    """Node-link data must survive a full graph round trip unchanged."""
    edge_specs = [
        ("A", "B", "TEST_edge1a"),
        ("B", "A", "TEST_edge1a"),
        ("A", "B", "TEST_edge1b"),
        ("B", "A", "TEST_edge1b"),
        ("B", "C", "TEST_edge2"),
        ("C", "B", "TEST_edge2"),
        ("C", "A", "TEST_edge3"),
        ("A", "C", "TEST_edge3"),
    ]
    node_ids = ("A", "B", "C")
    index_of = {node_id: pos for pos, node_id in enumerate(node_ids)}

    data = {
        "graph": {"test_attr": "TEST_graph"},
        "nodes": [
            {"id": node_id, "attr": {"test_attr": "TEST_node%d" % (pos + 1)}}
            for pos, node_id in enumerate(node_ids)
        ],
        "links": [
            {
                "source": index_of[src],
                "target": index_of[dst],
                "key": key,
                "attr": {"test_attr": attr},
            }
            for key, (src, dst, attr) in enumerate(edge_specs, start=1)
        ],
    }

    # deserialize then re-serialize: the result must be identical input data
    assert graph_to_node_link(node_link_to_graph(data)) == data
def test_edgelist_to_graph_1():
    """Parse a whitespace-separated edge list and verify nodes and edges."""
    edge_specs = [
        ("A", "B", "TEST_edge1a"),
        ("B", "A", "TEST_edge1a"),
        ("A", "B", "TEST_edge1b"),
        ("B", "A", "TEST_edge1b"),
        ("B", "C", "TEST_edge2"),
        ("C", "B", "TEST_edge2"),
        ("C", "A", "TEST_edge3"),
        ("A", "C", "TEST_edge3"),
    ]
    columns = ["src", "dst", "test_attr"]
    # one text line per edge, fields in column order
    lines = ["%s %s %s" % spec for spec in edge_specs]

    g = edgelist_to_graph(lines, columns)
    for node_id in ("A", "B", "C"):
        assert node_id in g
    # Internal edge storage maps each 1-based key to (src, dst, key, attrs).
    for key, (src, dst, attr) in enumerate(edge_specs, start=1):
        assert g._edges[key] == (src, dst, key, {"test_attr": attr})
| 30.669388
| 88
| 0.382885
| 776
| 7,514
| 3.443299
| 0.074742
| 0.290419
| 0.287425
| 0.197605
| 0.805763
| 0.790045
| 0.790045
| 0.785554
| 0.702844
| 0.702844
| 0
| 0.040873
| 0.426936
| 7,514
| 244
| 89
| 30.795082
| 0.579656
| 0.005989
| 0
| 0.628821
| 0
| 0
| 0.25231
| 0
| 0
| 0
| 0
| 0
| 0.104803
| 1
| 0.017467
| false
| 0
| 0.008734
| 0
| 0.026201
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4b9fc967ebb284bc8e371acf4a19889ffdd4004b
| 1,769
|
py
|
Python
|
test_giftcards.py
|
greeshmagopinath/GiftCard
|
095fc482f14ba86fcc11a5f437565fef7425102a
|
[
"MIT"
] | null | null | null |
test_giftcards.py
|
greeshmagopinath/GiftCard
|
095fc482f14ba86fcc11a5f437565fef7425102a
|
[
"MIT"
] | null | null | null |
test_giftcards.py
|
greeshmagopinath/GiftCard
|
095fc482f14ba86fcc11a5f437565fef7425102a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import sys
import subprocess
import unittest
class MainTest(unittest.TestCase):
    """End-to-end tests: run the gift-card scripts and check their stdout."""

    @staticmethod
    def _run_script(script, budget):
        """Run ``<python> <script> prices.txt <budget>`` and return stripped stdout.

        FIX: the original passed the whole command as a single string with
        shell=False; on POSIX that tries to execute a file literally named
        "python find_price.py prices.txt 2500" and raises FileNotFoundError
        (it only happened to work on Windows). Passing an argument list is
        portable, and sys.executable guarantees the same interpreter that
        runs the tests is used for the scripts.

        :param script: script filename to execute (e.g. "find_price.py")
        :param budget: gift-card balance in cents, converted to str for argv
        :return: decoded stdout with trailing CR/LF stripped
        """
        output = subprocess.check_output(
            [sys.executable, script, "prices.txt", str(budget)])
        return output.decode('utf-8').strip('\r\n')

    def test_find_prices(self):
        """Quick tests for finding two items summing to the gift-card value."""
        self.assertEqual('Candy Bar 500, Earmuffs 2000',
                         self._run_script("find_price.py", 2500))
        self.assertEqual(self._run_script("find_price.py", 2300),
                         'Paperback Book 700, Headphones 1400')
        self.assertEqual(self._run_script("find_price.py", 10000),
                         'Earmuffs 2000, Bluetooth Stereo 6000')
        # no two items fit in 1100
        self.assertEqual(self._run_script("find_price.py", 1100), 'Not possible')

    def test_bonus(self):
        """Quick tests for finding three items summing to the gift-card value."""
        self.assertEqual('Candy Bar 500, Paperback Book 700, Detergent 1000',
                         self._run_script("bonus.py", 2500))
        self.assertEqual(self._run_script("bonus.py", 2300),
                         'Candy Bar 500, Paperback Book 700, Detergent 1000')
        self.assertEqual(self._run_script("bonus.py", 10000),
                         'Headphones 1400, Earmuffs 2000, Bluetooth Stereo 6000')
        # no three items fit in 1100
        self.assertEqual(self._run_script("bonus.py", 1100), 'Not possible')
| 50.542857
| 111
| 0.668174
| 235
| 1,769
| 4.965957
| 0.251064
| 0.109683
| 0.143959
| 0.18509
| 0.849186
| 0.808055
| 0.802913
| 0.784062
| 0.784062
| 0.617823
| 0
| 0.066759
| 0.187111
| 1,769
| 34
| 112
| 52.029412
| 0.744784
| 0.061051
| 0
| 0.090909
| 0
| 0
| 0.385482
| 0
| 0
| 0
| 0
| 0
| 0.363636
| 1
| 0.090909
| false
| 0
| 0.136364
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
29b2cbbd14846ce4255acb7b7e069a1301db682d
| 149
|
py
|
Python
|
Config/Config.py
|
GreatHorizon/library
|
a1370b94c2aa51825732bacecb161fe54f43124d
|
[
"MIT"
] | 1
|
2021-01-07T08:54:18.000Z
|
2021-01-07T08:54:18.000Z
|
Config/Config.py
|
GreatHorizon/library
|
a1370b94c2aa51825732bacecb161fe54f43124d
|
[
"MIT"
] | 1
|
2020-12-29T08:16:11.000Z
|
2020-12-29T08:16:11.000Z
|
Config/Config.py
|
GreatHorizon/library
|
a1370b94c2aa51825732bacecb161fe54f43124d
|
[
"MIT"
] | null | null | null |
# Fallback credential used when no password is configured.
# NOTE(review): hard-coded secret committed to source control — rotate it and
# load from an environment variable or secret store instead. The value is a
# 128-character hex string (SHA-512 digest length) — presumably a password
# hash rather than a plaintext password; confirm against the consuming code.
DEFAULT_PASSWORD = '8925c482ed5f81236aa9c3492fe633303d55a7a663112f0371653a469721649712fe8aef0c08375f01d0d7d48b938b8bb2b47a1afe52b3afcb3f0a945f7fa275'
| 149
| 149
| 0.966443
| 3
| 149
| 47.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.578231
| 0.013423
| 149
| 1
| 149
| 149
| 0.394558
| 0
| 0
| 0
| 0
| 0
| 0.853333
| 0.853333
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
d9b166f49f90d2d321b9edf4a73dceafe72cacf3
| 25,754
|
py
|
Python
|
userbot/plugins/kill.py
|
udhay018777/reaperbot
|
ac2f149cddcd3e7fe2b54d14139dd3272abca4ed
|
[
"MIT"
] | 1
|
2021-04-25T00:56:37.000Z
|
2021-04-25T00:56:37.000Z
|
userbot/plugins/kill.py
|
udhay018777/reaperbot
|
ac2f149cddcd3e7fe2b54d14139dd3272abca4ed
|
[
"MIT"
] | null | null | null |
userbot/plugins/kill.py
|
udhay018777/reaperbot
|
ac2f149cddcd3e7fe2b54d14139dd3272abca4ed
|
[
"MIT"
] | 1
|
2020-06-02T00:15:59.000Z
|
2020-06-02T00:15:59.000Z
|
"""Emoji
Available Commands:
.kill"""
from telethon import events
import asyncio
from uniborg.util import admin_cmd
from telethon.tl.functions.users import GetFullUserRequest
@borg.on(admin_cmd(pattern=r"kill"))
async def _(event):
    """Animated `.kill` command handler.

    Must be used as a reply: the replied-to user is the "target" of a short
    shooting animation played by repeatedly editing the command message.
    One hard-coded owner id (1076931233) is protected and gets a taunt
    message instead of the animation.
    """
    if event.fwd_from:
        # ignore forwarded copies of the command
        return
    animation_interval = 2  # seconds between animation frames
    if not event.reply_to_msg_id:
        await event.edit("No User is Defined\nAre u dumb\n\nreply to a user.")
        return
    reply_message = await event.get_reply_message()
    # full-user lookup of the replied-to account (also validates it exists);
    # the returned profile fields are not otherwise used
    replied_user = await event.client(GetFullUserRequest(reply_message.from_id))
    idd = reply_message.from_id
    if idd == 1076931233:
        # protected owner account: taunt instead of "killing"
        await event.edit("This is My Master\n**How dare you trying to tell me to kill master nigger!**\n\n__Your account is on hold! Pay 99$ to my master__ [Indian Bhai](tg://user?id=953414679) __to release your account__😏")
        return
    await event.edit("killing..")
    animation_chars = [
        "Fiiiiire",
        "( ・ิω・ิ)︻デ═一-->",
        "---->____________",
        "------>__________",
        "-------->________",
        "---------->______",
        "------------>____",
        "-------------->__",
        "---------------->",
        "------>;(^。^)ノ",
        "( ̄ー ̄) DED",
        "**Target killed successfully (°̥̥̥̥̥̥̥̥•̀.̫•́°̥̥̥̥̥̥̥)**"
    ]
    # FIX: the original iterated range(0, 11) and indexed chars[i % 11] over
    # this 12-element list, so the final "Target killed successfully" frame
    # (index 11) was unreachable. Iterate every frame so the closing message
    # is actually shown.
    for frame in animation_chars:
        await asyncio.sleep(animation_interval)
        await event.edit(frame)
| 415.387097
| 4,033
| 0.036965
| 205
| 25,754
| 121.521951
| 0.546341
| 0.002408
| 0.002248
| 1.927264
| 0.963632
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001109
| 0.01957
| 25,754
| 61
| 4,034
| 422.196721
| 0.033545
| 0.003689
| 0
| 0.051282
| 0
| 0.025641
| 0.954545
| 0.941993
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.102564
| 0
| 0.128205
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d9b9e3e822c0b6e34cb663dc0ccda4de1fe64090
| 52,752
|
py
|
Python
|
src/hier_clust/expLink.py
|
iesl/expLinkage
|
4d46683a3eb86b4a40425acf08b608ab44f5006b
|
[
"Apache-2.0"
] | 10
|
2019-08-11T22:14:27.000Z
|
2022-03-02T12:58:37.000Z
|
src/hier_clust/expLink.py
|
iesl/expLinkage
|
4d46683a3eb86b4a40425acf08b608ab44f5006b
|
[
"Apache-2.0"
] | null | null | null |
src/hier_clust/expLink.py
|
iesl/expLinkage
|
4d46683a3eb86b4a40425acf08b608ab44f5006b
|
[
"Apache-2.0"
] | 6
|
2019-06-02T14:50:18.000Z
|
2021-08-13T07:36:29.000Z
|
"""
Copyright (C) 2019 University of Massachusetts Amherst.
This file is part of "expLinkage"
http://github.com/iesl/expLinkage
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import itertools, math, os
import numpy as np
import torch
import copy, time
from scipy.special import softmax
# Runs HAC with ExpLink for given value of linkAlpha and returns k-flatClusters and dendrogram purity
def runHAC(origDistMat, k, linkAlpha, numPoints, pidToCluster, threshold, scaleDist):
    """
    Runs HAC with ExpLink for given value of linkAlpha and returns k-flatClusters and dendrogram purity.

    Exactly one of k and threshold must be supplied: the tree is cut either when
    k active clusters remain, or at the first merger whose linkage exceeds threshold.

    :param origDistMat: Numpy Distance Matrix
    :param k: Number of clusters to use to cut tree. If None, then tree is cut using threshold value
    :param linkAlpha: Value of alpha parameter of expLink ("min", "max", or a number)
    :param numPoints: Number of points
    :param pidToCluster: Dictionary that maps point id to ground-truth cluster id (may be None)
    :param threshold: Threshold to use to cut tree. If None then tree is cut using k (given)
    :param scaleDist: Should all distances in the matrix be scaled using largest edge in distance matrix? Useful for preventing numerical overflows
    :return: (y_pred, dendPurity)
        y_pred: List that maps pid to its cluster id. Flat Clusters obtained by cutting tree using threshold or k
        dendPurity: Dendrogram purity of cluster tree built by greedy HAC with given linkAlpha
    """
    # Exactly one cutting criterion may be active at a time.
    assert (k is None) or (threshold is None)
    assert (k is not None) or (threshold is not None)
    # Roots of the current forest; initially every point is its own cluster.
    activeClusters = [pid for pid in range(numPoints)]
    # Point ids occupy [0, numPoints); merged clusters get fresh ids from numPoints upward.
    newCid = numPoints
    pidToParent = {}
    children = {pid:None for pid in activeClusters}
    if k is not None and len(activeClusters) <= k:
        # Technically I should construct a tree and then compute
        # dendrogram purity in this case as well, but I am not doing it...
        return activeClusters, 0
    # For a numeric linkAlpha, distMat is a (numerator, denominator) matrix pair;
    # for "min"/"max" it is a single matrix — see the two branches below.
    distMat = getDistMat(origDistMat=origDistMat, linkAlpha=linkAlpha, numPoints=numPoints,
                         matType="simple", scaleDist=scaleDist, dataStructType="matrix")
    y_pred = None
    mergeTime = 0
    updateTime = 0
    while len(activeClusters) > 1:
        # Find clusters to merge
        t1 = time.time()
        c1,c2 = findMerger_matVersion(linkAlpha=linkAlpha, distMat=distMat, currClusterList=activeClusters)
        mergeTime += time.time() - t1
        # Get flat clusters by cutting tree at threshold. y_pred being None means that flat clusters have not been obtained even once.
        # If flat clusters are obtained once by cutting the tree, then we need not do so in consecutive steps; the "y_pred is None" condition prevents us from doing it.
        if (threshold is not None) and (y_pred is None):
            cutTree = False
            if (linkAlpha == "min" or linkAlpha == "max"):
                if distMat[c1][c2] > threshold: cutTree = True
            else:
                # ExpLink linkage value = numerator / denominator.
                if distMat[0][c1][c2]/distMat[1][c1][c2] > threshold: cutTree = True
            if cutTree: # Should I cut tree now
                pidToPredCluster_thresh = getPidToPredClusters(numPoints=numPoints, pidToParent=pidToParent)
                y_pred = [pidToPredCluster_thresh[ pid ] for pid in range(numPoints)]
                if pidToCluster is None: # No need to continue performing HAC because pidToCluster is None, so dendPurity can not be computed anyway
                    return y_pred, 0
        try:
            # Remove merged clusters from the active list
            activeClusters.remove(c1)
            activeClusters.remove(c2)
        except Exception as e:
            print(c1,c2)
            raise e
        t1 = time.time()
        # Update distances of the merged cluster with all remaining clusters
        distMat = updateDistMat_matVersion(linkAlpha=linkAlpha, distMat=distMat, matType="simple",
                                           currClusterList=activeClusters, newCid=newCid, oldC1=c1, oldC2=c2)
        updateTime += time.time() - t1
        activeClusters.append(newCid)
        children[newCid] = (c1,c2)
        pidToParent[c1] = newCid
        pidToParent[c2] = newCid
        if k is not None and len(activeClusters) == k: # Get flat clusters such that there are k clusters
            pidToPredCluster_k = getPidToPredClusters(numPoints=numPoints, pidToParent=pidToParent)
            y_pred = [pidToPredCluster_k[ pid ] for pid in range(numPoints)]
            if pidToCluster is None: # No need to continue performing HAC because pidToCluster is None, so dendPurity can not be computed anyway
                return y_pred, 0
        newCid += 1
    if y_pred is None: # This is triggered when while loop terminated without forming flat clusters. It means that all points are put in 1 cluster
        y_pred = [1 for x in range(numPoints)]
    if pidToCluster is None:
        dendPurity = 0
    else:
        dendPurity = computeDendPurity(pidToCluster=pidToCluster, children=children, pidToParent=pidToParent)
    return y_pred, dendPurity
def calc_linkage_numpy(linkAlpha, values):
    """Reduce a collection of inter-cluster edge distances to one linkage value.

    "min" and "max" give single-link and complete-link respectively; any
    numeric alpha gives the ExpLink softmax-weighted average of the edges.
    """
    if linkAlpha == "min":
        return np.min(values)
    if linkAlpha == "max":
        return np.max(values)
    # ExpLink: weight each edge by softmax(alpha * edge), then average.
    edgeWeights = softmax(linkAlpha * values)
    return (edgeWeights * values).sum()
# Runs HAC with ExpLink for given value of linkAlpha and returns k-flatClusters and dendrogram purity
def runHAC_allEdges(origDistMat, k, linkAlpha, numPoints, pidToCluster, threshold, scaleDist):
    """
    Runs HAC with ExpLink for given value of linkAlpha and returns k-flatClusters and dendrogram purity.

    Unlike runHAC, this variant stores ALL point-to-point edges between each
    pair of clusters ("allEdges" data structure) and recomputes the linkage
    from them, memoizing results in a cache keyed by cluster-id pairs.

    :param origDistMat: Numpy Distance Matrix
    :param k: Number of clusters to use to cut tree. If None, then tree is cut using threshold value
    :param linkAlpha: Value of alpha parameter of expLink
    :param numPoints: Number of points
    :param pidToCluster: Dictionary that maps point id to ground-truth cluster id (may be None)
    :param threshold: Threshold to use to cut tree. If None then tree is cut using k (given)
    :param scaleDist: Should all distances in the matrix be scaled using largest edge in distance matrix? Useful for preventing numerical overflows
    :return: (y_pred, dendPurity)
        y_pred: List that maps pid to its cluster id. Flat Clusters obtained by cutting tree using threshold or k
        dendPurity: Dendrogram purity of cluster tree built by greedy HAC with given linkAlpha
    """
    # Exactly one cutting criterion may be active at a time.
    assert (k is None) or (threshold is None)
    assert (k is not None) or (threshold is not None)
    activeClusters = [pid for pid in range(numPoints)]
    # Merged clusters get fresh ids from numPoints upward; ids are never reused,
    # which keeps the linkage cache below valid across iterations.
    newCid = numPoints
    pidToParent = {}
    children = {pid:None for pid in activeClusters}
    if k is not None and len(activeClusters) <= k:
        # Technically I should construct a tree and then compute
        # dendrogram purity in this case as well, but I am not doing it...
        return activeClusters, 0
    distMat = getDistMat(origDistMat=origDistMat, linkAlpha=linkAlpha, numPoints=numPoints,
                         matType="simple", scaleDist=scaleDist, dataStructType="allEdges")
    y_pred = None
    mergeTime = 0
    updateTime = 0
    # Memoized linkage values per cluster pair (filled by findMerger_allEdges).
    cache = {}
    ctr = 0
    while len(activeClusters) > 1:
        ctr+=1
        # Find clusters to merge
        t1 = time.time()
        (c1,c2), merge_val = findMerger_allEdges(linkAlpha=linkAlpha, distMat=distMat, currClusterList=activeClusters, cache=cache)
        mergeTime += time.time() - t1
        # Get flat clusters by cutting tree at threshold. y_pred being None means that flat clusters have not been obtained even once.
        # If flat clusters are obtained once by cutting the tree, then we need not do so in consecutive steps; the "y_pred is None" condition prevents us from doing it.
        if (threshold is not None) and (y_pred is None):
            cutTree = False
            if merge_val > threshold: cutTree = True
            if cutTree: # Should I cut tree now
                pidToPredCluster_thresh = getPidToPredClusters(numPoints=numPoints, pidToParent=pidToParent)
                y_pred = [pidToPredCluster_thresh[ pid ] for pid in range(numPoints)]
                if pidToCluster is None: # No need to continue performing HAC because pidToCluster is None, so dendPurity can not be computed anyway
                    return y_pred, 0
        try:
            # Remove merged clusters from the active list
            activeClusters.remove(c1)
            activeClusters.remove(c2)
        except Exception as e:
            print(c1,c2)
            raise e
        t1 = time.time()
        # Update distances of the merged cluster with all remaining clusters
        distMat = updateDistMat_allEdges(linkAlpha=linkAlpha, distMat=distMat, matType="simple",
                                         currClusterList=activeClusters, newCid=newCid, oldC1=c1, oldC2=c2)
        updateTime += time.time() - t1
        activeClusters.append(newCid)
        children[newCid] = (c1,c2)
        pidToParent[c1] = newCid
        pidToParent[c2] = newCid
        if k is not None and len(activeClusters) == k: # Get flat clusters such that there are k clusters
            pidToPredCluster_k = getPidToPredClusters(numPoints=numPoints, pidToParent=pidToParent)
            y_pred = [pidToPredCluster_k[ pid ] for pid in range(numPoints)]
            if pidToCluster is None: # No need to continue performing HAC because pidToCluster is None, so dendPurity can not be computed anyway
                return y_pred, 0
        newCid += 1
    if y_pred is None: # This is triggered when while loop terminated without forming flat clusters. It means that all points are put in 1 cluster
        y_pred = [1 for x in range(numPoints)]
    if pidToCluster is None:
        dendPurity = 0
    else:
        dendPurity = computeDendPurity(pidToCluster=pidToCluster, children=children, pidToParent=pidToParent)
    return y_pred, dendPurity
# Run HAC on all points and try to compute loss
# This version uses two matrices to store numerator and denominator of ExpLink linkage separately,
# This has some numerical stability issues
def runHAC_torch_num_den(origDistMat, origTorchDistMat, linkAlpha, linkAlphaTorch, pidToGtCluster, numPoints, scaleDist):
    """
    Runs HAC with ExpLink for given value of linkAlpha and returns tensors for some pure and impure merger values.
    This version uses two matrices to store numerator and denominator of ExpLink linkage separately.
    This has some numerical stability issues.

    :param origDistMat: Numpy Distance Matrix
    :param origTorchDistMat: Torch distance matrix
    :param linkAlpha: Value of alpha parameter of expLink ("min", "max", or a number)
    :param linkAlphaTorch: Torch variable storing value of alpha parameter of explink
    :param pidToGtCluster: Dictionary that maps point id to ground-truth cluster id
    :param numPoints: Number of points
    :param scaleDist: Should all distances in the matrix be scaled using largest edge in distance matrix? Useful for preventing numerical overflows
    :return: posLinkageVals, negLinkageVals : Both are torch tensors
        posLinkageVals: Linkage value of pure mergers that are worse than some impure merger
        negLinkageVals: Linkage value of impure mergers that are better than some pure merger
    """
    # Map each sub-cluster to its corresponding gt-cluster. This is helpful in finding if two clusters being merged are part of same gt-cluster or not
    subClusterToGtCluster = copy.deepcopy(pidToGtCluster)
    activeClusters = [pid for pid in range(numPoints)]
    newCid = numPoints
    posLinkageVals = torch.cuda.FloatTensor(np.zeros((numPoints-1, 1)))
    negLinkageVals = torch.cuda.FloatTensor([]) # Empty tensor
    pidToParent = {}
    children = {pid: None for pid in activeClusters}
    t1 = time.time()
    distMat = getDistMat(origDistMat=origDistMat, linkAlpha=linkAlpha, numPoints=numPoints,
                         matType="simple", scaleDist=scaleDist, dataStructType="tuple")
    t2 = time.time()
    torchDistMat= getDistMat(origDistMat=origTorchDistMat, linkAlpha=linkAlphaTorch, numPoints=numPoints,
                             matType="pytorch", scaleDist=scaleDist, dataStructType="tuple")
    t3 = time.time()
    print("\t\tTime taken to get compatible matrices:{:.4f}\t{:.4f}\t{:.4f}".format(t3 - t1, t2 - t1, t3 - t2))
    mergeTime, updateTimeTorch, updateTimeSimple, linkCalcTime = 0,0,0,0
    while len(activeClusters) > 1:
        # Find clusters to merge
        t1 = time.time()
        (c1, c2), impureLinkages = findPureMerger_tuple(linkAlpha=linkAlpha, distMat=distMat,currClusterList=activeClusters,
                                                        subClusterToGtCluster=subClusterToGtCluster)
        mergeTime += time.time() - t1
        if (c1 is None) and (c2 is None): # Could not find 2 pure clusters to merge. At this time, we have agglomerated all gt-clusters into separate sub-trees
            break
        subClusterToGtCluster[newCid] = subClusterToGtCluster[c1]
        assert subClusterToGtCluster[c1] == subClusterToGtCluster[c2]
        t1 = time.time()
        if linkAlpha == "min" or linkAlpha == "max":
            posLinkageVals[newCid - numPoints] = torchDistMat[(c1, c2)]
        else:
            # ExpLink linkage = numerator / denominator of the stored tuple.
            posLinkageVals[newCid - numPoints] = torchDistMat[(c1, c2)][0]/torchDistMat[(c1, c2)][1]
        # BUG FIX: np.zeros takes the shape as a single tuple. The original code passed
        # np.zeros(len(impureLinkages), 1), putting 1 in the dtype slot, which raises a
        # TypeError whenever impureLinkages is non-empty. Sibling functions already use
        # the correct np.zeros((n, 1)) form.
        tempNegLinkages = torch.cuda.FloatTensor(np.zeros((len(impureLinkages), 1)))
        if len(impureLinkages) > 0:
            for ctr, (impC1, impC2) in enumerate(impureLinkages):
                if linkAlpha == "min" or linkAlpha == "max":
                    tempNegLinkages[ctr] = torchDistMat[(impC1, impC2)]
                else:
                    tempNegLinkages[ctr] = torchDistMat[(impC1, impC2)][0] / torchDistMat[(impC1, impC2)][1]
            negLinkageVals = torch.cat( (negLinkageVals, tempNegLinkages) )
        linkCalcTime += time.time() - t1
        # Remove merged clusters from the active list
        activeClusters.remove(c1)
        activeClusters.remove(c2)
        t1 = time.time()
        # Update distances of the merged cluster with all remaining clusters
        distMat = updateDistMat_tuple(linkAlpha=linkAlpha, distMat=distMat, currClusterList=activeClusters,
                                      newCid=newCid, oldC1=c1, oldC2=c2)
        updateTimeSimple += time.time() - t1
        t1 = time.time()
        torchDistMat = updateDistMat_tuple(linkAlpha=linkAlphaTorch, distMat=torchDistMat, currClusterList=activeClusters,
                                           newCid=newCid, oldC1=c1, oldC2=c2)
        updateTimeTorch += time.time() - t1
        activeClusters.append(newCid)
        children[newCid] = (c1, c2)
        pidToParent[c1] = newCid
        pidToParent[c2] = newCid
        newCid += 1
    return posLinkageVals, negLinkageVals
# Run HAC on all points and try to compuate loss
# This version accumulates all edges going between clusters as clustering proceeds and then takes softMax followed by weighted average of
# those edges to compute affinity between two clusters. This has better numerical stabilities.
# Still using separate numerator and denominator numpy matrices to speed up finding best pure merger.
def runHAC_torch_allEdges(origDistMat, origTorchDistMat, linkAlpha, linkAlphaTorch, pidToGtCluster, numPoints, scaleDist, getBestImpure):
    """
    Runs HAC with ExpLink for given value of linkAlpha and returns tensors for some pure and impure merger values.
    This version accumulates all edges going between clusters as clustering proceeds and then takes softMax followed by weighted average of
    those edges to compute affinity between two clusters. This has better numerical stabilities.
    Still using separate numerator and denominator numpy matrices to speed up finding best pure merger.

    :param origDistMat: Numpy Distance Matrix
    :param origTorchDistMat: Torch distance matrix
    :param linkAlpha: Value of alpha parameter of expLink ("min", "max", or a number)
    :param linkAlphaTorch: Torch variable storing value of alpha parameter of explink
    :param pidToGtCluster: Dictionary that maps point id to ground-truth cluster id
    :param numPoints: Number of points
    :param scaleDist: Should all distances in the matrix be scaled using largest edge in distance matrix? Useful for preventing numerical overflows
    :param getBestImpure: Should we just add best impure merger to list of negLinkages or should we add all impure mergers
                          that are better than best pure merger to list of negLinkages?
    :return: posLinkageVals, negLinkageVals : Both are torch tensors
        posLinkageVals: Linkage value of pure mergers that are worse than some impure merger
        negLinkageVals: Linkage value of impure mergers that are better than some pure merger
    """
    SoftMax = torch.nn.Softmax(dim=0)
    # Map each sub-cluster to its corresponding gt-cluster. This is helpful in finding if two clusters being merged are part of same gt-cluster or not
    subClustToGtClust = copy.deepcopy(pidToGtCluster)
    activeClusters = [pid for pid in range(numPoints)]
    newCid = numPoints
    posLinkageVals = torch.cuda.FloatTensor(np.zeros((numPoints-1,1)))
    negLinkageVals = torch.cuda.FloatTensor([]) # Empty tensor
    pidToParent = {}
    children = {pid: None for pid in activeClusters}
    t1 = time.time()
    # Numpy side uses the fast (numerator, denominator) matrix pair; torch side keeps all edges.
    distMat = getDistMat(origDistMat=origDistMat, linkAlpha=linkAlpha, numPoints=numPoints,
                         matType="simple", scaleDist=scaleDist, dataStructType="matrix")
    t2 = time.time()
    torchDistMat= getDistMat(origDistMat=origTorchDistMat, linkAlpha=linkAlphaTorch, numPoints=numPoints,
                             matType="pytorch", scaleDist=scaleDist, dataStructType="allEdges")
    t3 = time.time()
    print("\t\tTime taken to get compatible matrices:{:.4f}\t{:.4f}\t{:.4f}".format(t3 - t1, t2 - t1, t3 - t2))
    pureMergeExists = True
    mergeTime, updateTimeTorch, updateTimeSimple, linkCalcTime = 0,0,0,0
    while len(activeClusters) > 1:
        # Find clusters to merge
        t1 = time.time()
        (pureC1, pureC2), impureLinkages = findPureMerger_matVersion(linkAlpha=linkAlpha, distMat=distMat,
                                                                     currClusterList=activeClusters, subClusToGtClust=subClustToGtClust)
        if (pureC1 is None) and (pureC2 is None): # Could not find 2 pure clusters to merge. At this time, we have agglomerated all gt-clusters into separate sub-trees
            pureMergeExists = False
            break # Break if we don't want to consider loss using just impure agglomerations
        mergedC1, mergedC2 = pureC1, pureC2
        subClustToGtClust[newCid] = subClustToGtClust[pureC1]
        assert subClustToGtClust[pureC1] == subClustToGtClust[pureC2]
        mergeTime += time.time() - t1
        t1 = time.time()
        # NOTE(review): initialized to True, so every pure merger is added to the loss
        # regardless of whether a better impure merger involving it exists — the
        # loop below also only ever sets it to True. Intentional per the comment.
        betterNegLinkExists=True # Setting it to True because for now I want to add all pos linkage to loss
        tempNegLinkages = torch.cuda.FloatTensor( np.zeros((len(impureLinkages), 1)) )
        for ctr, (impC1, impC2) in enumerate(impureLinkages):
            if impC1 != pureC1 and impC1 != pureC2 and impC2 != pureC1 and impC2 != pureC2:
                # Don't consider this impure agglomeration as it does not involve either c1 or c2
                continue
            else:
                betterNegLinkExists=True
                if linkAlpha == "min":
                    tempNegLinkages[ctr] = torch.min(torchDistMat[(impC1, impC2)])
                elif linkAlpha == "max":
                    tempNegLinkages[ctr] = torch.max(torchDistMat[(impC1, impC2)])
                else:
                    # ExpLink over all stored edges: softmax weights, then weighted sum.
                    weights = SoftMax(linkAlphaTorch*torchDistMat[(impC1, impC2)])
                    tempNegLinkages[ctr] = torch.sum( weights*torchDistMat[(impC1, impC2)] )
        if getBestImpure and tempNegLinkages.shape[0] > 0:
            # NOTE(review): torch.min here yields a 0-dim tensor; torch.cat with a
            # 0-dim tensor is rejected by recent torch versions — verify against the
            # torch version this repo pins.
            bestNegLinkage = torch.min(tempNegLinkages)
            negLinkageVals = torch.cat( (negLinkageVals, bestNegLinkage) )
        else:
            negLinkageVals = torch.cat( (negLinkageVals, tempNegLinkages) )
        if betterNegLinkExists and (pureC1 is not None) and (pureC2 is not None): # Add this to list of posLinkages only if there is a better impure agglomeration involving c1 and c2
            if linkAlpha == "min":
                posLinkageVals[newCid - numPoints] = torch.min(torchDistMat[(pureC1, pureC2)])
            elif linkAlpha == "max":
                posLinkageVals[newCid - numPoints] = torch.max(torchDistMat[(pureC1, pureC2)])
            else:
                weights = SoftMax(linkAlphaTorch*torchDistMat[(pureC1,pureC2)])
                posLinkageVals[newCid - numPoints] = torch.sum(weights*torchDistMat[(pureC1,pureC2)])
        t2 = time.time()
        linkCalcTime += t2 - t1
        # Remove merged clusters from the active list
        activeClusters.remove(mergedC1)
        activeClusters.remove(mergedC2)
        t1 = time.time()
        # Update distances of the merged cluster with all remaining clusters
        distMat = updateDistMat_matVersion(linkAlpha=linkAlpha, distMat=distMat, matType="simple",
                                           currClusterList=activeClusters, newCid=newCid, oldC1=mergedC1, oldC2=mergedC2)
        updateTimeSimple += time.time() - t1
        t1 = time.time()
        # TODO: Can we avoid updating entire torchDistMatrix? Can we just optimize to just compute linkageVals directly, maybe after the tree is constructed?
        torchDistMat = updateDistMat_allEdges(linkAlpha=linkAlphaTorch, distMat=torchDistMat, matType="pytorch",
                                              currClusterList=activeClusters, newCid=newCid, oldC1=mergedC1, oldC2=mergedC2)
        updateTimeTorch += time.time() - t1
        activeClusters.append(newCid)
        children[newCid] = (mergedC1, mergedC2)
        pidToParent[mergedC1] = newCid
        pidToParent[mergedC2] = newCid
        newCid += 1
    return posLinkageVals, negLinkageVals
def runHAC_torch_allEdges_faces(origDistMat, origTorchDistMat, linkAlpha, linkAlphaTorch, pidToGtCluster, numPoints, scaleDist, getBestImpure):
    """
    This version keeps all edges around to calculate linkage between two clusters even for the NumPy distance matrix.
    This is very slow but avoids having overflow/underflow issues for the Face dataset which has distances of the order of
    1000 units which, when exponentiated, often result in numerical overflows.
    Runs HAC with ExpLink for given value of linkAlpha and returns tensors for some pure and impure merger values.
    This version accumulates all edges going between clusters as clustering proceeds and then takes softMax followed by weighted average of
    those edges to compute affinity between two clusters. This has better numerical stabilities.

    :param origDistMat: Numpy Distance Matrix
    :param origTorchDistMat: Torch distance matrix
    :param linkAlpha: Value of alpha parameter of expLink ("min", "max", or a number)
    :param linkAlphaTorch: Torch variable storing value of alpha parameter of explink
    :param pidToGtCluster: Dictionary that maps point id to ground-truth cluster id
    :param numPoints: Number of points
    :param scaleDist: Should all distances in the matrix be scaled using largest edge in distance matrix? Useful for preventing numerical overflows
    :param getBestImpure: Should we just add best impure merger to list of negLinkages or should we add all impure mergers
                          that are better than best pure merger to list of negLinkages?
    :return: posLinkageVals, negLinkageVals : Both are torch tensors
        posLinkageVals: Linkage value of pure mergers that are worse than some impure merger
        negLinkageVals: Linkage value of impure mergers that are better than some pure merger
    """
    SoftMax = torch.nn.Softmax(dim=0)
    # Map each sub-cluster to its corresponding gt-cluster. This is helpful in finding if two clusters being merged are part of same gt-cluster or not
    subClustToGtClust = copy.deepcopy(pidToGtCluster)
    activeClusters = [pid for pid in range(numPoints)]
    newCid = numPoints
    posLinkageVals = torch.cuda.FloatTensor(np.zeros((numPoints-1,1)))
    negLinkageVals = torch.cuda.FloatTensor([]) # Empty tensor
    pidToParent = {}
    children = {pid: None for pid in activeClusters}
    t1 = time.time()
    # Both the numpy and the torch structures keep all edges here (slow but stable).
    distMat = getDistMat(origDistMat=origDistMat, linkAlpha=linkAlpha, numPoints=numPoints,
                         matType="simple", scaleDist=scaleDist, dataStructType="allEdges")
    t2 = time.time()
    torchDistMat= getDistMat(origDistMat=origTorchDistMat, linkAlpha=linkAlphaTorch, numPoints=numPoints,
                             matType="pytorch", scaleDist=scaleDist, dataStructType="allEdges")
    t3 = time.time()
    print("\t\tTime taken to get compatible matrices:{:.4f}\t{:.4f}\t{:.4f}".format(t3 - t1, t2 - t1, t3 - t2))
    pureMergeExists = True
    mergeTime, updateTimeTorch, updateTimeSimple, linkCalcTime = 0,0,0,0
    # Memoized numpy linkage values per cluster pair (filled by findPureMerger_allEdges).
    cache = {}
    while len(activeClusters) > 1:
        # Find clusters to merge
        t1 = time.time()
        (pureC1, pureC2), impureLinkages = findPureMerger_allEdges(linkAlpha=linkAlpha, distMat=distMat, cache=cache,
                                                                   currClusterList=activeClusters, subClusterToGtCluster=subClustToGtClust)
        if (pureC1 is None) and (pureC2 is None): # Could not find 2 pure clusters to merge. At this time, we have agglomerated all gt-clusters into separate sub-trees
            pureMergeExists = False
            break # Break if we don't want to consider loss using just impure agglomerations
        mergedC1, mergedC2 = pureC1, pureC2
        subClustToGtClust[newCid] = subClustToGtClust[pureC1]
        assert subClustToGtClust[pureC1] == subClustToGtClust[pureC2]
        mergeTime += time.time() - t1
        t1 = time.time()
        # NOTE(review): initialized to True, so every pure merger is added to the loss
        # regardless of the impure-merger check below. Intentional per the comment.
        betterNegLinkExists=True # Setting it to True because for now I want to add all pos linkage to loss
        tempNegLinkages = torch.cuda.FloatTensor( np.zeros((len(impureLinkages), 1)) )
        for ctr, (impC1, impC2) in enumerate(impureLinkages):
            if impC1 != pureC1 and impC1 != pureC2 and impC2 != pureC1 and impC2 != pureC2:
                # Don't consider this impure agglomeration as it does not involve either c1 or c2
                continue
            else:
                betterNegLinkExists=True
                if linkAlpha == "min":
                    tempNegLinkages[ctr] = torch.min(torchDistMat[(impC1, impC2)])
                elif linkAlpha == "max":
                    tempNegLinkages[ctr] = torch.max(torchDistMat[(impC1, impC2)])
                else:
                    # ExpLink over all stored edges: softmax weights, then weighted sum.
                    weights = SoftMax(linkAlphaTorch*torchDistMat[(impC1, impC2)])
                    tempNegLinkages[ctr] = torch.sum( weights*torchDistMat[(impC1, impC2)] )
        if getBestImpure and tempNegLinkages.shape[0] > 0:
            # NOTE(review): torch.min yields a 0-dim tensor; torch.cat with it may
            # require an older torch version — verify against the pinned version.
            bestNegLinkage = torch.min(tempNegLinkages)
            negLinkageVals = torch.cat( (negLinkageVals, bestNegLinkage) )
        else:
            negLinkageVals = torch.cat( (negLinkageVals, tempNegLinkages) )
        if betterNegLinkExists and (pureC1 is not None) and (pureC2 is not None): # Add this to list of posLinkages only if there is a better impure agglomeration involving c1 and c2
            if linkAlpha == "min":
                posLinkageVals[newCid - numPoints] = torch.min(torchDistMat[(pureC1, pureC2)])
            elif linkAlpha == "max":
                posLinkageVals[newCid - numPoints] = torch.max(torchDistMat[(pureC1, pureC2)])
            else:
                weights = SoftMax(linkAlphaTorch*torchDistMat[(pureC1,pureC2)])
                posLinkageVals[newCid - numPoints] = torch.sum(weights*torchDistMat[(pureC1,pureC2)])
        t2 = time.time()
        linkCalcTime += t2 - t1
        # Remove merged clusters from the active list
        activeClusters.remove(mergedC1)
        activeClusters.remove(mergedC2)
        t1 = time.time()
        # Update distances of the merged cluster with all remaining clusters
        distMat = updateDistMat_allEdges(linkAlpha=linkAlpha, distMat=distMat, matType="simple",
                                         currClusterList=activeClusters, newCid=newCid, oldC1=mergedC1, oldC2=mergedC2)
        updateTimeSimple += time.time() - t1
        t1 = time.time()
        # TODO: Can we avoid updating entire torchDistMatrix? Can we just optimize to just compute linkageVals directly, maybe after the tree is constructed?
        torchDistMat = updateDistMat_allEdges(linkAlpha=linkAlphaTorch, distMat=torchDistMat, matType="pytorch",
                                              currClusterList=activeClusters, newCid=newCid, oldC1=mergedC1, oldC2=mergedC2)
        updateTimeTorch += time.time() - t1
        activeClusters.append(newCid)
        children[newCid] = (mergedC1, mergedC2)
        pidToParent[mergedC1] = newCid
        pidToParent[mergedC2] = newCid
        newCid += 1
    return posLinkageVals, negLinkageVals
################### FIND CLUSTERS TO MERGER USING DATA STRUCTURE STORING DISTANCE BETWEEN CLUSTERS #####################
def findMerger_matVersion(linkAlpha, distMat, currClusterList):
    """
    Finds and returns ids of the two clusters to merge according to the linkage function given by linkAlpha.

    :param linkAlpha: Alpha parameter of ExpLink, or "min"/"max" for single/complete linkage
    :param distMat: For "min"/"max", a single distance matrix; otherwise a pair of matrices storing the
                    expLink linkage value between two clusters as separate numerators and denominators
    :param currClusterList: List of cluster ids which correspond to roots of partially formed trees until this time
    :return: (fc1, fc2) — ids of the two clusters to merge, ordered so that fc1 > fc2
    """
    # Restrict the (larger, id-indexed) matrix to rows/cols of currently active clusters.
    ixgrid = np.ix_(currClusterList, currClusterList)
    if linkAlpha == "min" or linkAlpha == "max":
        newMat = distMat[ixgrid]
    else:
        # ExpLink linkage = numerator / denominator; 0/0 on never-updated entries gives NaN.
        newMat = np.nan_to_num(distMat[0][ixgrid]/distMat[1][ixgrid]) # Remove nans
    newMat = newMat + np.diag([np.inf for i in currClusterList]) # Add inf along diagonal to avoid it when finding closest clusters to merge
    flatIdx = np.argmin(newMat) # Find smallest value and get row and col corresponding to that value
    row, col = divmod(int(flatIdx), len(currClusterList))
    # Map submatrix coordinates back to actual cluster ids, as row/col are relative
    # to the submatrix on which the min was calculated.
    fc1 = currClusterList[row]
    fc2 = currClusterList[col]
    if fc1 < fc2: # Just make sure that first cluster id is larger than second one
        fc1,fc2 = fc2,fc1
    return (fc1, fc2)
def findMerger_allEdges(linkAlpha, distMat, currClusterList, cache):
    """
    Returns the best 2 sub-clusters to merge and the linkage value of that merger.

    Linkage values are memoized in `cache` keyed by the cluster-id pair (both
    orderings are stored); since merged cluster ids are never reused, cached
    entries remain valid across HAC iterations.

    :param linkAlpha: Alpha parameter of ExpLink to use for calculating linkage value between clusters
    :param distMat: Dictionary that maps a pair of cluster ids to the edges used to compute their linkage
    :param currClusterList: List of cluster ids which correspond to roots of partially formed trees until this time
    :param cache: Dict memoizing already-computed linkage values per cluster pair
    :return: ((c1, c2), linkage) for the best merger, or ((None, None), None) when fewer than 2 clusters remain
    """
    bestVal = None
    bestPair = None, None
    for cid1, cid2 in itertools.combinations(currClusterList, 2):
        key = (cid1, cid2)
        if key in cache:
            linkVal = cache[key]
        elif (cid2, cid1) in cache:
            linkVal = cache[(cid2, cid1)]
        else:
            linkVal = calc_linkage_numpy(linkAlpha, distMat[key])
            # Store both orderings so later lookups hit regardless of pair order.
            cache[key] = linkVal
            cache[(cid2, cid1)] = linkVal
        if bestVal is None or linkVal < bestVal:
            bestVal = linkVal
            bestPair = key
    return bestPair, bestVal
def findPureMerger_tuple(linkAlpha, distMat, currClusterList, subClusterToGtCluster):
    """
    Find the best "pure" merger (two sub-clusters of the same ground-truth
    cluster) and every pair — pure or not — with a lower linkage than it.

    :param linkAlpha: 'min'/'max' or a numeric ExpLink alpha
    :param distMat: Dict mapping cluster-id pairs to linkage (min/max) or (numerator, denominator)
    :param currClusterList: Ids of clusters that are currently roots of partially formed trees
    :param subClusterToGtCluster: Maps each current cluster to its (single) ground-truth cluster
    :return: (purePair, betterPairs); ((None, None), []) if no pure merger exists
    """
    def _linkage(a, b):
        # 'min'/'max' store linkage directly; numeric alpha stores a (num, den) pair.
        if linkAlpha == 'min' or linkAlpha == 'max':
            return distMat[(a, b)]
        return distMat[(a, b)][0] / distMat[(a, b)][1]

    bestPureDist = None
    bestPurePair = None, None
    for a, b in itertools.combinations(currClusterList, 2):
        d = _linkage(a, b)
        sameGt = subClusterToGtCluster[a] == subClusterToGtCluster[b]
        if sameGt and (bestPureDist is None or d < bestPureDist):
            bestPureDist = d
            bestPurePair = a, b
    if bestPureDist is None:
        return (None, None), []
    # Purity need not be re-checked here: every pair cheaper than the best pure
    # merger is reported, including cross-class ("impure") ones.
    betterPairs = [(a, b) for a, b in itertools.combinations(currClusterList, 2)
                   if _linkage(a, b) < bestPureDist]
    return bestPurePair, betterPairs
def findPureMerger_allEdges(linkAlpha, distMat, currClusterList, subClusterToGtCluster, cache):
    """
    All-edges variant: find the best pure merger (two sub-clusters of the same
    ground-truth cluster) plus every pair with a lower linkage than it.

    :param linkAlpha: Alpha parameter of ExpLink used when computing linkage values
    :param distMat: Dict mapping cluster-id pairs to the list of all edges between them
    :param currClusterList: Ids of clusters that are currently roots of partially formed trees
    :param subClusterToGtCluster: Maps each current cluster to its (single) ground-truth cluster
    :param cache: Memo of computed linkage values, keyed by (c1, c2) in both orders
    :return: (purePair, betterPairs); ((None, None), []) if no pure merger exists
    """
    def _cached_linkage(a, b):
        # Check the memo in both key orders before computing from scratch.
        if (a, b) in cache:
            return cache[(a, b)]
        if (b, a) in cache:
            return cache[(b, a)]
        val = calc_linkage_numpy(linkAlpha, distMat[(a, b)])
        cache[(a, b)] = val
        cache[(b, a)] = val
        return val

    bestPureDist = None
    bestPurePair = None, None
    for a, b in itertools.combinations(currClusterList, 2):
        d = _cached_linkage(a, b)
        sameGt = subClusterToGtCluster[a] == subClusterToGtCluster[b]
        if sameGt and (bestPureDist is None or d < bestPureDist):
            bestPureDist = d
            bestPurePair = a, b
    if bestPureDist is None:
        return (None, None), []
    # No purity check on the second pass: any pair cheaper than the best pure
    # merger is reported, including cross-class ("impure") ones.
    betterPairs = [pair for pair in itertools.combinations(currClusterList, 2)
                   if _cached_linkage(*pair) < bestPureDist]
    return bestPurePair, betterPairs
def findPureMerger_matVersion(linkAlpha, distMat, currClusterList, subClusToGtClust):
    """
    Matrix-backed variant: find the best pure merger (two sub-clusters of the
    same ground-truth cluster) plus every pair with a lower linkage than it.

    :param linkAlpha: 'min'/'max' (linkage stored directly in distMat) or a
        numeric alpha (distMat is a (numerator, denominator) matrix pair)
    :param distMat: Matrix (or matrix pair) indexed by [c1, c2]
    :param currClusterList: Ids of clusters that are currently roots of partially formed trees
    :param subClusToGtClust: Maps each current cluster to its ground-truth cluster
    :return: (purePair, betterPairs); ((None, None), []) if no pure merger exists
    """
    if linkAlpha == 'min' or linkAlpha == 'max':
        def _linkage(a, b):
            return distMat[a, b]
    else:
        def _linkage(a, b):
            return distMat[0][a, b] / distMat[1][a, b]

    pureMin = None
    purePair = None, None
    for a, b in itertools.combinations(currClusterList, 2):
        # First pass only looks at pairs belonging to the same gt-cluster.
        if subClusToGtClust[a] != subClusToGtClust[b]:
            continue
        d = _linkage(a, b)
        if pureMin is None or d < pureMin:
            pureMin = d
            purePair = a, b
    if pureMin is None:
        return (None, None), []
    # Second pass: report every pair cheaper than the best pure merger,
    # regardless of class — those are the mergers HAC would wrongly prefer.
    betterPairs = [(a, b) for a, b in itertools.combinations(currClusterList, 2)
                   if _linkage(a, b) < pureMin]
    return purePair, betterPairs
########################################################################################################################
################################ UPDATE DATA STRUCTURE STORING DISTANCE BETWEEN CLUSTERS ###############################
def updateDistMat_tuple(linkAlpha, distMat, currClusterList, newCid, oldC2, oldC1):
    """
    After merging oldC1 and oldC2 into newCid, derive linkage entries between
    newCid and every remaining cluster from the two children's entries.

    For 'min'/'max' linkage the child values are combined with min/max; for a
    numeric (or torch Variable) alpha the (numerator, denominator) pairs are
    summed component-wise. Mutates and returns distMat.
    """
    strAlpha = isinstance(linkAlpha, str)
    numericAlpha = isinstance(linkAlpha, (torch.autograd.Variable, float, int))
    for cid in currClusterList:
        if strAlpha and linkAlpha == 'min':
            distMat[(cid, newCid)] = min(distMat[(cid, oldC1)], distMat[(cid, oldC2)])
            distMat[(newCid, cid)] = min(distMat[(oldC1, cid)], distMat[(oldC2, cid)])
        elif strAlpha and linkAlpha == 'max':
            distMat[(cid, newCid)] = max(distMat[(cid, oldC1)], distMat[(cid, oldC2)])
            distMat[(newCid, cid)] = max(distMat[(oldC1, cid)], distMat[(oldC2, cid)])
        elif numericAlpha:
            n1, d1 = distMat[(cid, oldC1)]
            n2, d2 = distMat[(cid, oldC2)]
            distMat[(cid, newCid)] = (n1 + n2, d1 + d2)
            rn1, rd1 = distMat[(oldC1, cid)]
            rn2, rd2 = distMat[(oldC2, cid)]
            distMat[(newCid, cid)] = (rn1 + rn2, rd1 + rd2)
        else:
            raise Exception("Invalid value for linkAlpha = {}".format(linkAlpha))
    return distMat
def updateDistMat_allEdges(linkAlpha, distMat, currClusterList, newCid, oldC2, oldC1, matType="pytorch"):
    """
    After merging oldC1 and oldC2 into newCid, the edge list between newCid and
    each remaining cluster is the concatenation of the two children's edge
    lists. Mutates and returns distMat.

    :param matType: "simple" (numpy arrays) or "pytorch" (tensors)
    """
    if matType == "simple":
        joiner = np.concatenate
    elif matType == "pytorch":
        joiner = torch.cat
    else:
        raise Exception("Invalid matType={}".format(matType))
    for cid in currClusterList:
        distMat[(newCid, cid)] = joiner((distMat[(cid, oldC1)], distMat[(cid, oldC2)]))
        distMat[(cid, newCid)] = joiner((distMat[(oldC1, cid)], distMat[(oldC2, cid)]))
    return distMat
def updateDistMat_matVersion(linkAlpha, distMat, matType, currClusterList, newCid, oldC2, oldC1):
    """
    Matrix-backed update after merging oldC1 and oldC2 into newCid: fill row
    and column newCid from the children's rows/columns.

    For 'min'/'max' linkage the children are combined element-wise with
    min/max; for a numeric (or torch Variable) alpha the numerator (distMat[0])
    and denominator (distMat[1]) matrices are summed. Mutates and returns distMat.

    :param matType: "simple" (numpy) or "pytorch" (tensors)
    :raises Exception: on an unknown matType or linkAlpha
    """
    # Validate matType once up front; the original duplicated the numeric
    # branch verbatim for "simple" and "pytorch" (the `+` ops are identical
    # for numpy arrays and torch tensors), so the two copies are consolidated.
    if matType not in ("simple", "pytorch"):
        raise Exception("Invalid matrix type= {}".format(matType))
    if isinstance(linkAlpha, str) and (linkAlpha == 'min' or linkAlpha == 'max'):
        if linkAlpha == 'min':
            combine = np.minimum if matType == "simple" else torch.min
        else:
            combine = np.maximum if matType == "simple" else torch.max
        # Row first, then column — preserves the original update order.
        distMat[newCid, :] = combine(distMat[oldC1, :], distMat[oldC2, :])
        distMat[:, newCid] = combine(distMat[:, oldC1], distMat[:, oldC2])
    elif isinstance(linkAlpha, (torch.autograd.Variable, float, int)):
        for part in (0, 1):  # 0 = numerator matrix, 1 = denominator matrix
            distMat[part][newCid, :] = distMat[part][oldC1, :] + distMat[part][oldC2, :]
            distMat[part][:, newCid] = distMat[part][:, oldC1] + distMat[part][:, oldC2]
    else:
        raise Exception("Invalid value for linkAlpha = {}".format(linkAlpha))
    return distMat
########################################################################################################################
################################ GET DATA STRUCTURE TO STORE DISTANCE BETWEEN CLUSTERS #################################
def getDistMat(origDistMat, numPoints, linkAlpha, matType, dataStructType, scaleDist):
    """
    Return the distance data structure used by HAC.

    :param origDistMat: numPoints x numPoints pairwise distance matrix (numpy or torch)
    :param numPoints: Number of data points
    :param linkAlpha: 'min'/'max' or a numeric/torch ExpLink alpha
    :param matType: "simple" (numpy) or "pytorch"
    :param dataStructType: "tuple", "matrix" or "allEdges" -- selects the backing structure
    :param scaleDist: If True, divide all distances by the largest edge for scale invariance
    :return: Distance structure consumed by the corresponding find/update helpers
    :raises Exception: on an unknown matType or dataStructType
    """
    # Dividing each edge by the largest edge to make the computation scale invariant
    if scaleDist:
        print("\n\n\nScaling all distances using largest edges....\n\n\n\n")
        if matType == "simple":
            largestEdge = np.amax(origDistMat, axis=None)
            assert largestEdge != np.inf
            # BUG FIX: the original wrote `assert largestEdge != np.nan`, which is
            # always True (NaN compares unequal to everything, including itself),
            # so NaN-poisoned matrices slipped through. Use isnan to really check.
            assert not np.isnan(largestEdge)
            origDistMat = origDistMat/largestEdge
        elif matType == "pytorch":
            # NOTE(review): the torch path performs no inf/nan check — presumably
            # intentional, but worth confirming against the callers.
            largestEdge = torch.max(origDistMat)
            origDistMat = origDistMat/largestEdge
        else:
            raise Exception("Invalid matrix type:{}".format(matType))
    if dataStructType == "tuple":
        ''' Distance matrix is a dictionary. Key is pair of cluster ids, and value is either (numerator and denominator)
            used to calculate linkage value or value is linkage value itself.
        '''
        return getDistMatrix_tuple(origDistMat=origDistMat, numPoints=numPoints, linkAlpha=linkAlpha, matType=matType)
    elif dataStructType == "matrix":
        ''' If linkAlpha is a number, then distance matrix is actually 2 matrices.
            (i,j) of 1st matrix is numerator and of 2nd matrix is denominator for linkage value between cluster i and j.
            If linkAlpha is min or max then there is only one matrix with (i,j) storing linkage value between i and j
        '''
        return getDistMatrix_matVersion(origDistMat=origDistMat, numPoints=numPoints, linkAlpha=linkAlpha, matType=matType)
    elif dataStructType == "allEdges":
        '''
        distance Matrix is a dictionary. Key is pair of cluster ids, and value is list of all edges between cluster i & j
        '''
        return getDistMatrix_allEdges(origDistMat=origDistMat, numPoints=numPoints, matType=matType)
    else:
        # Typo fix: error message previously read "Invalud".
        raise Exception("Invalid dataStructType={}".format(dataStructType))
def getDistMatrix_tuple(origDistMat, numPoints, linkAlpha, matType):
    """
    Build the "tuple" distance structure: a dict keyed by ordered point-id
    pairs (both orders). For 'min'/'max' the value is the raw distance; for a
    numeric (or torch Variable) alpha it is the ExpLink (numerator, denominator).
    """
    isMinMax = isinstance(linkAlpha, str) and (linkAlpha == 'min' or linkAlpha == 'max')
    distMat = {}
    for p1, p2 in itertools.combinations(range(numPoints), 2):
        d = origDistMat[p1][p2]
        if isMinMax:
            entry = d
        elif matType == "simple" and isinstance(linkAlpha, (float, int)):
            entry = np.exp(linkAlpha*d)*d, math.exp(linkAlpha*d)
        elif matType == "pytorch" and isinstance(linkAlpha, (float, int, torch.autograd.Variable)):
            entry = torch.exp(linkAlpha*d)*d, torch.exp(linkAlpha*d)
        else:
            raise Exception("Invalid linkage alpha :{} of type:{} or matType:{} of type:{}".format(linkAlpha,type(linkAlpha), matType, type(matType)))
        # Store symmetrically so either lookup order works.
        distMat[(p1, p2)] = entry
        distMat[(p2, p1)] = entry
    return distMat
def getDistMatrix_allEdges(origDistMat, numPoints, matType):
    """
    Build the "allEdges" distance structure: a dict keyed by ordered node-id
    pairs over all 2*numPoints-1 eventual tree-node ids. Pairs of original
    points start with their single edge; pairs involving a future internal
    node (id >= numPoints) start empty and are filled as merges happen.

    :param origDistMat: numPoints x numPoints pairwise distance matrix
    :param matType: "simple" (numpy arrays) or "pytorch" (tensors)
    :raises Exception: on an unknown matType
    """
    distMat = {}
    if matType == "simple":
        for pid1, pid2 in itertools.combinations(range(2*numPoints-1), 2):
            if pid1 < numPoints and pid2 < numPoints:
                tempDist = origDistMat[pid1][pid2]
                distMat[(pid1, pid2)] = np.array([tempDist])
                distMat[(pid2, pid1)] = np.array([tempDist])
            else:
                distMat[(pid1, pid2)] = np.array([])
                distMat[(pid2, pid1)] = np.array([])
    elif matType == "pytorch":
        for pid1, pid2 in itertools.combinations(range(2*numPoints-1), 2):
            if pid1 < numPoints and pid2 < numPoints:
                tempDist = origDistMat[pid1][pid2]
                distMat[(pid1, pid2)] = tempDist.view(1)
                distMat[(pid2, pid1)] = tempDist.view(1)
            else:
                # NOTE(review): empty slots are plain lists here but np.array([])
                # in the "simple" branch — looks inconsistent (torch.cat on a
                # list would fail); presumably these are overwritten before use.
                # Left as-is to preserve behavior; verify against callers.
                distMat[(pid1, pid2)] = []
                distMat[(pid2, pid1)] = []
    else:
        # Typo fix: error message previously read "Invalud".
        raise Exception("Invalid matType={}".format(matType))
    return distMat
def getDistMatrix_matVersion(origDistMat, numPoints, linkAlpha, matType):
    """
    Build the "matrix" distance structure, sized (2*numPoints-1)^2 so rows and
    columns for future internal tree nodes already exist.

    For 'min'/'max' linkage a single matrix holding raw distances is returned;
    for a numeric (or torch Variable) alpha a (numerator, denominator) matrix
    pair for the ExpLink linkage is returned.
    """
    fullDim = 2*numPoints - 1
    idxList = list(range(numPoints))
    # Top-left numPoints x numPoints block holds the original point distances.
    ixgrid = np.ix_(idxList, idxList)
    if isinstance(linkAlpha, str) and (linkAlpha == "min" or linkAlpha == "max"):
        if matType == "simple":
            mat = np.zeros((fullDim, fullDim))
        elif matType == "pytorch":
            if isinstance(origDistMat, torch.cuda.FloatTensor):
                mat = torch.cuda.FloatTensor(np.zeros((fullDim, fullDim)))
            else:
                mat = torch.FloatTensor(np.zeros((fullDim, fullDim)))
        else:
            raise Exception("Invalid linkage alpha :{} of type:{} or matType:{} of type:{}".format(linkAlpha,type(linkAlpha), matType, type(matType)))
        mat[ixgrid] = origDistMat
        return mat
    elif matType == "simple" and isinstance(linkAlpha, (float, int)):
        numMat = np.zeros((fullDim, fullDim))
        denMat = np.zeros((fullDim, fullDim))
        denMat[ixgrid] = np.exp(origDistMat*linkAlpha)
        numMat[ixgrid] = np.multiply(denMat[ixgrid], origDistMat)
        return numMat, denMat
    elif matType == "pytorch" and isinstance(linkAlpha, (float, int, torch.autograd.Variable)):
        if isinstance(origDistMat, torch.cuda.FloatTensor):
            numMat = torch.cuda.FloatTensor(fullDim, fullDim).fill_(0)
            denMat = torch.cuda.FloatTensor(fullDim, fullDim).fill_(0)
        else:
            numMat = torch.zeros(fullDim, fullDim)
            denMat = torch.zeros(fullDim, fullDim)
        denMat[ixgrid] = torch.exp(origDistMat*linkAlpha)
        numMat[ixgrid] = torch.mul(denMat[ixgrid], origDistMat)
        return numMat, denMat
    else:
        raise Exception("Invalid linkage alpha :{} of type:{} or matType:{} of type:{}".format(linkAlpha,type(linkAlpha), matType, type(matType)))
########################################################################################################################
def write_tree(treeFilename, pidToCluster, children, pidToParent):
    """
    Dump the tree as TSV lines "<child>\\t<parent>\\t<childGtLabel>" followed by
    a final root line "<root>\\tNone\\tNone".

    :param treeFilename: Output path
    :param pidToCluster: Maps leaf point id -> ground-truth cluster label
    :param children: Maps internal node id -> (leftChild, rightChild) or None
    :param pidToParent: Maps node id -> parent node id (the root has no entry)
    """
    # Any labelled point works as a starting point; walk parent links to the root.
    root = list(pidToCluster.keys())[0]
    while root in pidToParent:
        root = pidToParent[root]
    rows = []
    for nodeId, pair in children.items():
        if pair is None:
            continue
        left, right = pair
        for child in (left, right):
            label = pidToCluster[child] if child in pidToCluster else "None"
            rows.append("{}\t{}\t{}\n".format(child, nodeId, label))
    rows.append("{}\tNone\tNone\n".format(root))
    with open(treeFilename, "w") as writer:
        writer.writelines(rows)
def computeDendPurity(pidToCluster, children, pidToParent):
    """
    Compute dendrogram purity of the tree via the external xcluster scorer.

    Writes the tree to a temp file under $XCLUSTER_ROOT, shells out to
    bin/util/score_tree.sh, parses the score back, and removes both temp files.

    :param pidToCluster: Maps leaf point id -> ground-truth cluster label
    :param children: Maps internal node id -> (leftChild, rightChild) or None
    :param pidToParent: Maps node id -> parent node id
    :return: Dendrogram purity as a float (0 if the result file was empty)
    """
    dendPurity = 0
    # NOTE(review): requires XCLUSTER_ROOT to point at an xcluster checkout
    # containing bin/setup.sh and bin/util/score_tree.sh — not verified here.
    XCLUSTER_ROOT = os.getenv("XCLUSTER_ROOT")
    filenum = time.time()  # timestamp keeps concurrent runs from colliding (mostly)
    treeFilename = "{}/perchTree_{}.tree".format(XCLUSTER_ROOT, filenum)
    write_tree(treeFilename=treeFilename, pidToCluster=pidToCluster, children=children, pidToParent=pidToParent)
    assert os.path.isfile(treeFilename)
    command = "cd $XCLUSTER_ROOT && source bin/setup.sh && pwd && "
    command += "sh bin/util/score_tree.sh {} algo data 24 None > treeResult_{}".format(treeFilename,filenum)
    # print("Executing command = {}".format(command))
    os.system(command)
    resultFileName = "{}/treeResult_{}".format(XCLUSTER_ROOT, filenum)
    with open(resultFileName,"r") as reader:
        # Scorer emits "<algo> <data> <purity>"; only the first line is used.
        for line in reader:
            algo, data, dendPurity = line.split()
            dendPurity = float(dendPurity)
            break
    # Clean up the temp tree and result files.
    command = "rm {} && rm {}".format(treeFilename, resultFileName)
    # print("Removing files:{}".format(command))
    os.system(command)
    assert not os.path.isfile(treeFilename)
    assert not os.path.isfile(resultFileName)
    return dendPurity
# Returns flat clustering for tree built so far
def getPidToPredClusters(pidToParent, numPoints):
    """
    Map every leaf point to the root of its partially built tree; points not
    yet merged anywhere map to themselves.

    :param pidToParent: Maps node id -> parent node id (roots are absent)
    :param numPoints: Number of leaf points (ids 0..numPoints-1)
    :return: Dict mapping each point id to its current root id
    """
    def _root_of(node):
        # Chase parent pointers until a node without a parent is reached.
        while node in pidToParent:
            node = pidToParent[node]
        return node

    return {pid: _root_of(pid) for pid in range(numPoints)}
if __name__ == "__main__":
    # Module is import-only; no standalone entry point.
    pass
| 43.596694
| 177
| 0.724484
| 6,785
| 52,752
| 5.608106
| 0.085777
| 0.007148
| 0.008278
| 0.007253
| 0.808494
| 0.789388
| 0.764132
| 0.743896
| 0.718798
| 0.694068
| 0
| 0.018282
| 0.163236
| 52,752
| 1,209
| 178
| 43.632754
| 0.843751
| 0.388725
| 0
| 0.724771
| 0
| 0
| 0.041229
| 0.003727
| 0
| 0
| 0
| 0.001654
| 0.018349
| 1
| 0.03211
| false
| 0.004587
| 0.007645
| 0
| 0.094801
| 0.009174
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9ca2f0eab90741e419d452440dd1589ca4190c9
| 6,834
|
py
|
Python
|
test/test_container_project_registry_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
test/test_container_project_registry_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
test/test_container_project_registry_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
"""
HyperOne
HyperOne API # noqa: E501
The version of the OpenAPI document: 0.1.0
Generated by: https://openapi-generator.tech
"""
import unittest
import h1
from h1.api.container_project_registry_api import ContainerProjectRegistryApi # noqa: E501
# NOTE: auto-generated by openapi-generator (see module header). Every test is
# an empty placeholder awaiting a real implementation; regenerating the client
# will recreate these stubs, so substantial edits belong in separate test files.
class TestContainerProjectRegistryApi(unittest.TestCase):
    """ContainerProjectRegistryApi unit test stubs"""

    def setUp(self):
        # Fresh API client per test; no configuration / auth is exercised here.
        self.api = ContainerProjectRegistryApi()  # noqa: E501

    def tearDown(self):
        pass

    def test_container_project_registry_create(self):
        """Test case for container_project_registry_create

        Create container/registry  # noqa: E501
        """
        pass

    def test_container_project_registry_credential_create(self):
        """Test case for container_project_registry_credential_create

        Create container/registry.credential  # noqa: E501
        """
        pass

    def test_container_project_registry_credential_delete(self):
        """Test case for container_project_registry_credential_delete

        Delete container/registry.credential  # noqa: E501
        """
        pass

    def test_container_project_registry_credential_get(self):
        """Test case for container_project_registry_credential_get

        Get container/registry.credential  # noqa: E501
        """
        pass

    def test_container_project_registry_credential_list(self):
        """Test case for container_project_registry_credential_list

        List container/registry.credential  # noqa: E501
        """
        pass

    def test_container_project_registry_credential_patch(self):
        """Test case for container_project_registry_credential_patch

        Update container/registry.credential  # noqa: E501
        """
        pass

    def test_container_project_registry_delete(self):
        """Test case for container_project_registry_delete

        Delete container/registry  # noqa: E501
        """
        pass

    def test_container_project_registry_domain_create(self):
        """Test case for container_project_registry_domain_create

        Create container/registry.domain  # noqa: E501
        """
        pass

    def test_container_project_registry_domain_delete(self):
        """Test case for container_project_registry_domain_delete

        Delete container/registry.domain  # noqa: E501
        """
        pass

    def test_container_project_registry_domain_get(self):
        """Test case for container_project_registry_domain_get

        Get container/registry.domain  # noqa: E501
        """
        pass

    def test_container_project_registry_domain_list(self):
        """Test case for container_project_registry_domain_list

        List container/registry.domain  # noqa: E501
        """
        pass

    def test_container_project_registry_event_get(self):
        """Test case for container_project_registry_event_get

        Get container/registry.event  # noqa: E501
        """
        pass

    def test_container_project_registry_event_list(self):
        """Test case for container_project_registry_event_list

        List container/registry.event  # noqa: E501
        """
        pass

    def test_container_project_registry_get(self):
        """Test case for container_project_registry_get

        Get container/registry  # noqa: E501
        """
        pass

    def test_container_project_registry_list(self):
        """Test case for container_project_registry_list

        List container/registry  # noqa: E501
        """
        pass

    def test_container_project_registry_repository_get(self):
        """Test case for container_project_registry_repository_get

        Get container/registry.repository  # noqa: E501
        """
        pass

    def test_container_project_registry_repository_image_delete(self):
        """Test case for container_project_registry_repository_image_delete

        Delete container/registry.image  # noqa: E501
        """
        pass

    def test_container_project_registry_repository_image_get(self):
        """Test case for container_project_registry_repository_image_get

        Get container/registry.image  # noqa: E501
        """
        pass

    def test_container_project_registry_repository_image_list(self):
        """Test case for container_project_registry_repository_image_list

        List container/registry.image  # noqa: E501
        """
        pass

    def test_container_project_registry_repository_list(self):
        """Test case for container_project_registry_repository_list

        List container/registry.repository  # noqa: E501
        """
        pass

    def test_container_project_registry_service_get(self):
        """Test case for container_project_registry_service_get

        Get container/registry.service  # noqa: E501
        """
        pass

    def test_container_project_registry_service_list(self):
        """Test case for container_project_registry_service_list

        List container/registry.service  # noqa: E501
        """
        pass

    def test_container_project_registry_start(self):
        """Test case for container_project_registry_start

        Start container/registry  # noqa: E501
        """
        pass

    def test_container_project_registry_stop(self):
        """Test case for container_project_registry_stop

        Stop container/registry  # noqa: E501
        """
        pass

    def test_container_project_registry_tag_create(self):
        """Test case for container_project_registry_tag_create

        Create container/registry.tag  # noqa: E501
        """
        pass

    def test_container_project_registry_tag_delete(self):
        """Test case for container_project_registry_tag_delete

        Delete container/registry.tag  # noqa: E501
        """
        pass

    def test_container_project_registry_tag_get(self):
        """Test case for container_project_registry_tag_get

        Get container/registry.tag  # noqa: E501
        """
        pass

    def test_container_project_registry_tag_list(self):
        """Test case for container_project_registry_tag_list

        List container/registry.tag  # noqa: E501
        """
        pass

    def test_container_project_registry_tag_put(self):
        """Test case for container_project_registry_tag_put

        Replace container/registry.tag  # noqa: E501
        """
        pass

    def test_container_project_registry_transfer(self):
        """Test case for container_project_registry_transfer

        Transfer container/registry  # noqa: E501
        """
        pass

    def test_container_project_registry_update(self):
        """Test case for container_project_registry_update

        Update container/registry  # noqa: E501
        """
        pass
if __name__ == '__main__':
    # Allow running this generated test module directly.
    unittest.main()
| 27.780488
| 91
| 0.682324
| 755
| 6,834
| 5.805298
| 0.076821
| 0.229979
| 0.344969
| 0.141456
| 0.865617
| 0.837554
| 0.826603
| 0.791011
| 0.54757
| 0.472964
| 0
| 0.020943
| 0.252414
| 6,834
| 245
| 92
| 27.893878
| 0.836954
| 0.474246
| 0
| 0.444444
| 1
| 0
| 0.002777
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.458333
| false
| 0.444444
| 0.041667
| 0
| 0.513889
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
d9cba1ac67d30024ddd445e5c89d59b4be46f943
| 8,369
|
py
|
Python
|
tests/packages/test_main.py
|
julienmalard/poetry-core
|
eb2c91553ace540bb1e827cdd2b7b7c89d65b961
|
[
"MIT"
] | null | null | null |
tests/packages/test_main.py
|
julienmalard/poetry-core
|
eb2c91553ace540bb1e827cdd2b7b7c89d65b961
|
[
"MIT"
] | null | null | null |
tests/packages/test_main.py
|
julienmalard/poetry-core
|
eb2c91553ace540bb1e827cdd2b7b7c89d65b961
|
[
"MIT"
] | null | null | null |
from poetry.core.packages.dependency import Dependency
# --- PEP 508 parsing: bare names, version constraints, extras, and markers ---
def test_dependency_from_pep_508():
    """A bare package name parses with a wildcard ('*') constraint."""
    name = "requests"
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == name
    assert str(dep.constraint) == "*"

def test_dependency_from_pep_508_with_version():
    """`name==version` splits into name and exact constraint."""
    name = "requests==2.18.0"
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"

def test_dependency_from_pep_508_with_parens():
    """Parenthesized version specifiers are accepted as well."""
    name = "requests (==2.18.0)"
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"

def test_dependency_from_pep_508_with_constraint():
    """A compound specifier is normalized into union-of-ranges form."""
    name = "requests>=2.12.0,!=2.17.*,<3.0"
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    # != 2.17.* is rewritten as two disjoint ranges joined with '||'.
    assert str(dep.constraint) == ">=2.12.0,<2.17.0 || >=2.18.0,<3.0"

def test_dependency_from_pep_508_with_extras():
    """`extra == ...` markers populate in_extras and are kept on the marker."""
    name = 'requests==2.18.0; extra == "foo" or extra == "bar"'
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"
    assert dep.in_extras == ["foo", "bar"]
    assert str(dep.marker) == 'extra == "foo" or extra == "bar"'

def test_dependency_from_pep_508_with_python_version():
    """python_version markers are translated into python_versions constraints."""
    name = 'requests (==2.18.0); python_version == "2.7" or python_version == "2.6"'
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"
    assert dep.extras == frozenset()
    assert dep.python_versions == "~2.7 || ~2.6"
    assert str(dep.marker) == 'python_version == "2.7" or python_version == "2.6"'

def test_dependency_from_pep_508_with_single_python_version():
    """A single python_version marker becomes a single '~' constraint."""
    name = 'requests (==2.18.0); python_version == "2.7"'
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"
    assert dep.extras == frozenset()
    assert dep.python_versions == "~2.7"
    assert str(dep.marker) == 'python_version == "2.7"'

def test_dependency_from_pep_508_with_platform():
    """sys_platform markers are kept on the marker, not on python_versions."""
    name = 'requests (==2.18.0); sys_platform == "win32" or sys_platform == "darwin"'
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"
    assert dep.extras == frozenset()
    assert dep.python_versions == "*"
    assert str(dep.marker) == 'sys_platform == "win32" or sys_platform == "darwin"'

def test_dependency_from_pep_508_complex():
    """A marker combining python_version, sys_platform and extra round-trips."""
    name = (
        "requests (==2.18.0); "
        'python_version >= "2.7" and python_version != "3.2" '
        'and (sys_platform == "win32" or sys_platform == "darwin") '
        'and extra == "foo"'
    )
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"
    assert dep.in_extras == ["foo"]
    assert dep.python_versions == ">=2.7 !=3.2.*"
    assert str(dep.marker) == (
        'python_version >= "2.7" and python_version != "3.2" '
        'and (sys_platform == "win32" or sys_platform == "darwin") '
        'and extra == "foo"'
    )
# --- PEP 508 'in' / 'not in' markers and bracketed extras ---
def test_dependency_python_version_in():
    """Space-separated `python_version in` values become a '||' union."""
    name = "requests (==2.18.0); python_version in '3.3 3.4 3.5'"
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"
    assert dep.python_versions == "3.3.* || 3.4.* || 3.5.*"
    assert str(dep.marker) == 'python_version in "3.3 3.4 3.5"'

def test_dependency_python_version_in_comma():
    """Comma-separated `python_version in` values parse identically."""
    name = "requests (==2.18.0); python_version in '3.3, 3.4, 3.5'"
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"
    assert dep.python_versions == "3.3.* || 3.4.* || 3.5.*"
    assert str(dep.marker) == 'python_version in "3.3, 3.4, 3.5"'

def test_dependency_platform_in():
    """`sys_platform in` stays on the marker verbatim."""
    name = "requests (==2.18.0); sys_platform in 'win32 darwin'"
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"
    assert str(dep.marker) == 'sys_platform in "win32 darwin"'

def test_dependency_with_extra():
    """Bracketed extras (`name[extra]`) populate the extras set."""
    name = "requests[security] (==2.18.0)"
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"
    assert len(dep.extras) == 1
    assert "security" in dep.extras

def test_dependency_from_pep_508_with_python_version_union_of_multi():
    """Disjoint python_version ranges combine into a '||' union."""
    name = (
        "requests (==2.18.0); "
        '(python_version >= "2.7" and python_version < "2.8") '
        'or (python_version >= "3.4" and python_version < "3.5")'
    )
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"
    assert dep.extras == frozenset()
    assert dep.python_versions == ">=2.7 <2.8 || >=3.4 <3.5"
    # Note: the parenthesization is dropped in the normalized marker string.
    assert str(dep.marker) == (
        'python_version >= "2.7" and python_version < "2.8" '
        'or python_version >= "3.4" and python_version < "3.5"'
    )

def test_dependency_from_pep_508_with_not_in_op_marker():
    """`python_version not in` becomes a conjunction of '!=' wildcards."""
    name = (
        "jinja2 (>=2.7,<2.8)"
        '; python_version not in "3.0,3.1,3.2" and extra == "export"'
    )
    dep = Dependency.create_from_pep_508(name)
    assert dep.name == "jinja2"
    assert str(dep.constraint) == ">=2.7,<2.8"
    assert dep.in_extras == ["export"]
    assert dep.python_versions == "!=3.0.*, !=3.1.*, !=3.2.*"
    assert (
        str(dep.marker) == 'python_version not in "3.0,3.1,3.2" and extra == "export"'
    )
def test_dependency_from_pep_508_with_git_url():
name = "django-utils @ git+ssh://git@corp-gitlab.com/corp-utils.git@1.2"
dep = Dependency.create_from_pep_508(name)
assert "django-utils" == dep.name
assert dep.is_vcs()
assert "git" == dep.vcs
assert "ssh://git@corp-gitlab.com/corp-utils.git" == dep.source
assert "1.2" == dep.reference
def test_dependency_from_pep_508_with_git_url_and_comment_and_extra():
name = (
"poetry @ git+https://github.com/python-poetry/poetry.git@b;ar;#egg=poetry"
' ; extra == "foo;"'
)
dep = Dependency.create_from_pep_508(name)
assert "poetry" == dep.name
assert dep.is_vcs()
assert "git" == dep.vcs
assert "https://github.com/python-poetry/poetry.git" == dep.source
assert "b;ar;" == dep.reference
assert dep.in_extras == ["foo;"]
def test_dependency_from_pep_508_with_url():
name = "django-utils @ https://example.com/django-utils-1.0.0.tar.gz"
dep = Dependency.create_from_pep_508(name)
assert "django-utils" == dep.name
assert dep.is_url()
assert "https://example.com/django-utils-1.0.0.tar.gz" == dep.url
def test_dependency_from_pep_508_with_wheel_url():
name = (
"example_wheel @ https://example.com/example_wheel-14.0.2-py2.py3-none-any.whl"
)
dep = Dependency.create_from_pep_508(name)
assert "example-wheel" == dep.name
assert str(dep.constraint) == "14.0.2"
def test_dependency_from_pep_508_with_python_full_version():
    """python_full_version markers keep their full precision ("3.5.4")."""
    requirement = (
        "requests (==2.18.0); "
        '(python_version >= "2.7" and python_version < "2.8") '
        'or (python_full_version >= "3.4" and python_full_version < "3.5.4")'
    )
    dependency = Dependency.create_from_pep_508(requirement)
    assert dependency.name == "requests"
    assert str(dependency.constraint) == "2.18.0"
    assert dependency.extras == frozenset()
    # The marker's version ranges are translated into python_versions.
    assert dependency.python_versions == ">=2.7 <2.8 || >=3.4 <3.5.4"
    assert str(dependency.marker) == (
        'python_version >= "2.7" and python_version < "2.8" '
        'or python_full_version >= "3.4" and python_full_version < "3.5.4"'
    )
def test_dependency_from_pep_508_with_python_full_version_pep440_compatible_release_astrix():
    """A "3.4.*" wildcard marker survives into python_versions unchanged."""
    requirement = 'pathlib2 ; python_version == "3.4.*" or python_version < "3"'
    dependency = Dependency.create_from_pep_508(requirement)
    assert dependency.name == "pathlib2"
    # No version constraint on the package itself.
    assert str(dependency.constraint) == "*"
    assert dependency.python_versions == "==3.4.* || <3"
def test_dependency_from_pep_508_with_python_full_version_pep440_compatible_release_tilde():
    """A PEP 440 "~=" compatible-release marker survives unchanged."""
    requirement = 'pathlib2 ; python_version ~= "3.4" or python_version < "3"'
    dependency = Dependency.create_from_pep_508(requirement)
    assert dependency.name == "pathlib2"
    # No version constraint on the package itself.
    assert str(dependency.constraint) == "*"
    assert dependency.python_versions == "~=3.4 || <3"
| 32.564202
| 93
| 0.645358
| 1,240
| 8,369
| 4.124194
| 0.075806
| 0.072155
| 0.078217
| 0.062964
| 0.88815
| 0.848455
| 0.813258
| 0.742276
| 0.701995
| 0.663277
| 0
| 0.066017
| 0.190943
| 8,369
| 256
| 94
| 32.691406
| 0.689263
| 0
| 0
| 0.483696
| 0
| 0.054348
| 0.323575
| 0.0141
| 0
| 0
| 0
| 0
| 0.456522
| 1
| 0.119565
| false
| 0
| 0.005435
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9cf51c6a7d3f0c944e5bd86f076633d029e286f
| 2,652
|
py
|
Python
|
tests/unit_tests/homeassistant/test_online_sensor.py
|
graham33/teslajsonpy
|
1432c04d7039e485a36cde360ee94e505d9a5b47
|
[
"Apache-2.0"
] | null | null | null |
tests/unit_tests/homeassistant/test_online_sensor.py
|
graham33/teslajsonpy
|
1432c04d7039e485a36cde360ee94e505d9a5b47
|
[
"Apache-2.0"
] | null | null | null |
tests/unit_tests/homeassistant/test_online_sensor.py
|
graham33/teslajsonpy
|
1432c04d7039e485a36cde360ee94e505d9a5b47
|
[
"Apache-2.0"
] | null | null | null |
"""Test online sensor."""
import pytest
from teslajsonpy.controller import Controller
from teslajsonpy.homeassistant.binary_sensor import OnlineSensor
from tests.tesla_mock import TeslaMock
def test_has_battery(monkeypatch):
    """Test has_battery()."""
    mock = TeslaMock(monkeypatch)
    controller = Controller(None)
    vehicle_data = mock.data_request_vehicle()
    sensor = OnlineSensor(vehicle_data, controller)
    # The online sensor reports no battery of its own.
    assert not sensor.has_battery()
def test_get_value_on_init(monkeypatch):
    """Test get_value() after initialization."""
    _mock = TeslaMock(monkeypatch)
    _controller = Controller(None)
    _data = _mock.data_request_vehicle()
    _sensor = OnlineSensor(_data, _controller)
    # Fix: "X is not None" replaces the non-idiomatic "not X is None" (PEP 8 / E714).
    assert _sensor is not None
    # No update has run yet, so the sensor has no value.
    assert _sensor.get_value() is None
@pytest.mark.asyncio
async def test_get_value_after_update(monkeypatch):
    """Test get_value() after an update."""
    _mock = TeslaMock(monkeypatch)
    _controller = Controller(None)
    # Mark the mocked vehicle as online in the controller's caches.
    monkeypatch.setitem(_controller.car_online, "5YJSA11111111111", True)
    monkeypatch.setitem(
        _controller.car_state, "5YJSA11111111111", TeslaMock.data_request_vehicle()
    )
    _data = _mock.data_request_vehicle()
    _sensor = OnlineSensor(_data, _controller)
    await _sensor.async_update()
    # Fix: "is not None" replaces the non-idiomatic "not ... is None" (PEP 8 / E714).
    assert _sensor is not None
    assert _sensor.get_value() is not None
    assert _sensor.get_value()
@pytest.mark.asyncio
async def test_get_value_on(monkeypatch):
    """Test get_value() for online mode."""
    _mock = TeslaMock(monkeypatch)
    _controller = Controller(None)
    monkeypatch.setitem(_controller.car_online, "5YJSA11111111111", True)
    monkeypatch.setitem(
        _controller.car_state, "5YJSA11111111111", TeslaMock.data_request_vehicle()
    )
    _data = _mock.data_request_vehicle()
    _sensor = OnlineSensor(_data, _controller)
    # Force the vehicle state to "online" before refreshing the sensor.
    _data["state"] = "online"
    await _sensor.async_update()
    # Fix: "is not None" replaces the non-idiomatic "not ... is None" (PEP 8 / E714).
    assert _sensor is not None
    assert _sensor.get_value() is not None
    assert _sensor.get_value()
@pytest.mark.asyncio
async def test_get_value_off(monkeypatch):
    """Test get_value() for offline mode."""
    _mock = TeslaMock(monkeypatch)
    _controller = Controller(None)
    monkeypatch.setitem(_controller.car_online, "5YJSA11111111111", False)
    monkeypatch.setitem(
        _controller.car_state, "5YJSA11111111111", TeslaMock.data_request_vehicle()
    )
    _data = _mock.data_request_vehicle()
    _sensor = OnlineSensor(_data, _controller)
    # Force the vehicle state to "asleep" before refreshing the sensor.
    _data["state"] = "asleep"
    await _sensor.async_update()
    # Fix: "is not None" replaces the non-idiomatic "not ... is None" (PEP 8 / E714).
    assert _sensor is not None
    assert _sensor.get_value() is not None
    assert not _sensor.get_value()
| 26.787879
| 83
| 0.729638
| 301
| 2,652
| 6.049834
| 0.162791
| 0.065898
| 0.074135
| 0.102142
| 0.84514
| 0.772103
| 0.757825
| 0.74849
| 0.728171
| 0.728171
| 0
| 0.032892
| 0.174585
| 2,652
| 98
| 84
| 27.061224
| 0.798995
| 0.029412
| 0
| 0.704918
| 0
| 0
| 0.048361
| 0
| 0
| 0
| 0
| 0
| 0.196721
| 1
| 0.032787
| false
| 0
| 0.065574
| 0
| 0.098361
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9fdd413e1c5e6f500bf8dc8cb407ce4aff025fe
| 259
|
py
|
Python
|
torch/distributed/fsdp/__init__.py
|
ljhOfGithub/pytorch
|
c568f7b16f2a98d72ff5b7c6c6161b67b2c27514
|
[
"Intel"
] | 1
|
2022-03-29T00:44:31.000Z
|
2022-03-29T00:44:31.000Z
|
torch/distributed/fsdp/__init__.py
|
ljhOfGithub/pytorch
|
c568f7b16f2a98d72ff5b7c6c6161b67b2c27514
|
[
"Intel"
] | null | null | null |
torch/distributed/fsdp/__init__.py
|
ljhOfGithub/pytorch
|
c568f7b16f2a98d72ff5b7c6c6161b67b2c27514
|
[
"Intel"
] | 1
|
2022-03-28T21:49:41.000Z
|
2022-03-28T21:49:41.000Z
|
from .flatten_params_wrapper import FlatParameter
from .fully_sharded_data_parallel import FullyShardedDataParallel
from .fully_sharded_data_parallel import CPUOffload, BackwardPrefetch, ShardingStrategy
from .fully_sharded_data_parallel import StateDictType
| 51.8
| 87
| 0.907336
| 29
| 259
| 7.724138
| 0.517241
| 0.120536
| 0.214286
| 0.267857
| 0.455357
| 0.455357
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069498
| 259
| 4
| 88
| 64.75
| 0.929461
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8a1d6fdd27a9d8b75beb4bdb6065d672b7922654
| 54,953
|
py
|
Python
|
tensorflow_estimator/python/estimator/head/multi_label_head_test.py
|
ziky90/estimator
|
825c02ce244ce21ec4f01360dfdf90cbf92f6bde
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_estimator/python/estimator/head/multi_label_head_test.py
|
ziky90/estimator
|
825c02ce244ce21ec4f01360dfdf90cbf92f6bde
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_estimator/python/estimator/head/multi_label_head_test.py
|
ziky90/estimator
|
825c02ce244ce21ec4f01360dfdf90cbf92f6bde
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for multi_label_head.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import six
from tensorflow.python.eager import context
from tensorflow.python.feature_column import feature_column_lib as feature_column
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import string_ops
from tensorflow.python.ops import variables
from tensorflow.python.ops.losses import losses
from tensorflow.python.platform import test
from tensorflow.python.training import monitored_session
from tensorflow_estimator.python.estimator import model_fn
from tensorflow_estimator.python.estimator.canned import dnn
from tensorflow_estimator.python.estimator.canned import metric_keys
from tensorflow_estimator.python.estimator.canned import prediction_keys
from tensorflow_estimator.python.estimator.head import head_utils as test_lib
from tensorflow_estimator.python.estimator.head import multi_label_head as head_lib
def _sigmoid_cross_entropy(labels, logits):
"""Returns sigmoid cross entropy averaged over classes."""
sigmoid_logits = 1 / (1 + np.exp(-logits))
unreduced_result = (
-labels * np.log(sigmoid_logits)
-(1 - labels) * np.log(1 - sigmoid_logits))
# Mean over classes
return np.mean(unreduced_result, axis=-1, keepdims=True)
class MultiLabelHead(test.TestCase):
def test_n_classes_is_none(self):
with self.assertRaisesRegexp(
ValueError,
r'n_classes must be > 1 for multi-label classification\. Given: None'):
head_lib.MultiLabelHead(n_classes=None)
def test_n_classes_is_1(self):
with self.assertRaisesRegexp(
ValueError,
r'n_classes must be > 1 for multi-label classification\. Given: 1'):
head_lib.MultiLabelHead(n_classes=1)
def test_threshold_too_small(self):
with self.assertRaisesRegexp(
ValueError,
r'thresholds must be in \(0, 1\) range\. Given: 0\.0'):
head_lib.MultiLabelHead(n_classes=2, thresholds=[0., 0.5])
def test_threshold_too_large(self):
with self.assertRaisesRegexp(
ValueError,
r'thresholds must be in \(0, 1\) range\. Given: 1\.0'):
head_lib.MultiLabelHead(n_classes=2, thresholds=[0.5, 1.0])
def test_label_vocabulary_dict(self):
with self.assertRaisesRegexp(
ValueError,
r'label_vocabulary must be a list or tuple\. '
r'Given type: <(type|class) \'dict\'>'):
head_lib.MultiLabelHead(n_classes=2, label_vocabulary={'foo': 'bar'})
def test_label_vocabulary_wrong_size(self):
with self.assertRaisesRegexp(
ValueError,
r'Length of label_vocabulary must be n_classes \(3\). Given: 2'):
head_lib.MultiLabelHead(n_classes=3, label_vocabulary=['foo', 'bar'])
def test_invalid_loss_reduction(self):
with self.assertRaisesRegexp(
ValueError, r'Invalid loss_reduction: invalid_loss_reduction'):
head_lib.MultiLabelHead(
n_classes=3, loss_reduction='invalid_loss_reduction')
with self.assertRaisesRegexp(
ValueError, r'Invalid loss_reduction: none'):
head_lib.MultiLabelHead(
n_classes=3, loss_reduction=losses.Reduction.NONE)
def test_loss_fn_arg_labels_missing(self):
def _loss_fn(logits):
del logits # Unused
with self.assertRaisesRegexp(
ValueError,
r'loss_fn must contain argument: labels\. '
r'Given arguments: \(\'logits\',\)'):
head_lib.MultiLabelHead(n_classes=3, loss_fn=_loss_fn)
def test_loss_fn_arg_logits_missing(self):
def _loss_fn(labels):
del labels # unused
with self.assertRaisesRegexp(
ValueError,
r'loss_fn must contain argument: logits\. '
r'Given arguments: \(\'labels\',\)'):
head_lib.MultiLabelHead(n_classes=3, loss_fn=_loss_fn)
def test_loss_fn_arg_features_ok(self):
def _loss_fn(labels, logits, features):
del labels, logits, features # Unused
head_lib.MultiLabelHead(n_classes=3, loss_fn=_loss_fn)
def test_loss_fn_arg_invalid(self):
def _loss_fn(labels, logits, name=None):
del labels, logits, name # Unused
with self.assertRaisesRegexp(
ValueError,
r'loss_fn has unexpected args: \[\'name\'\]'):
head_lib.MultiLabelHead(n_classes=3, loss_fn=_loss_fn)
def test_classes_for_class_based_metrics_invalid(self):
with self.assertRaisesRegexp(
ValueError,
r'All classes_for_class_based_metrics must be in range \[0, 2\]\. '
r'Given: -1'):
head_lib.MultiLabelHead(
n_classes=3, classes_for_class_based_metrics=[2, -1])
def test_classes_for_class_based_metrics_string_invalid(self):
with self.assertRaisesRegexp(
ValueError, r'\'z\' is not in list'):
head_lib.MultiLabelHead(
n_classes=3, label_vocabulary=['a', 'b', 'c'],
classes_for_class_based_metrics=['c', 'z'])
  def test_predict(self):
    """Tests logits/probabilities predictions and graph-mode export outputs."""
    n_classes = 4
    head = head_lib.MultiLabelHead(n_classes)
    self.assertEqual(n_classes, head.logits_dimension)
    logits = np.array(
        [[0., 1., 2., -1.], [-1., -2., -3., 1.]], dtype=np.float32)
    expected_probabilities = nn.sigmoid(logits)
    # Exported classes are the stringified class indices, one row per example.
    expected_export_classes = [[b'0', b'1', b'2', b'3']] * 2
    keys = prediction_keys.PredictionKeys
    preds = head.predictions(logits, [keys.LOGITS, keys.PROBABILITIES])
    self.assertAllClose(logits, self.evaluate(preds[keys.LOGITS]))
    self.assertAllClose(expected_probabilities,
                        self.evaluate(preds[keys.PROBABILITIES]))
    if context.executing_eagerly():
      return
    # Graph mode only: check the full EstimatorSpec and its export outputs.
    spec = head.create_estimator_spec(
        features={'x': np.array(((42,),), dtype=np.int32)},
        mode=model_fn.ModeKeys.PREDICT,
        logits=logits)
    self.assertItemsEqual(
        (test_lib._DEFAULT_SERVING_KEY, 'predict', 'classification'),
        spec.export_outputs.keys())
    # Assert predictions and export_outputs.
    with self.cached_session() as sess:
      test_lib._initialize_variables(self, spec.scaffold)
      self.assertIsNone(spec.scaffold.summary_op)
      predictions = sess.run(spec.predictions)
      self.assertAllClose(logits,
                          predictions[prediction_keys.PredictionKeys.LOGITS])
      self.assertAllClose(
          expected_probabilities,
          predictions[prediction_keys.PredictionKeys.PROBABILITIES])
      self.assertAllClose(
          expected_probabilities,
          sess.run(spec.export_outputs[test_lib._DEFAULT_SERVING_KEY].scores))
      self.assertAllEqual(
          expected_export_classes,
          sess.run(spec.export_outputs[test_lib._DEFAULT_SERVING_KEY].classes))
  def test_weight_should_not_impact_prediction(self):
    """Tests that example weights leave predictions unchanged."""
    n_classes = 4
    head = head_lib.MultiLabelHead(n_classes, weight_column='example_weights')
    self.assertEqual(n_classes, head.logits_dimension)
    logits = np.array(
        [[0., 1., 2., -1.], [-1., -2., -3., 1.]], dtype=np.float32)
    # Weights only affect the loss; predictions must be pure sigmoid(logits).
    expected_probabilities = nn.sigmoid(logits)
    weights_2x1 = [[1.], [2.]]
    features = {
        'x': np.array(((42,),), dtype=np.int32),
        'example_weights': weights_2x1
    }
    keys = prediction_keys.PredictionKeys
    preds = head.predictions(logits, [keys.LOGITS, keys.PROBABILITIES])
    self.assertAllClose(logits, self.evaluate(preds[keys.LOGITS]))
    self.assertAllClose(expected_probabilities,
                        self.evaluate(preds[keys.PROBABILITIES]))
    if context.executing_eagerly():
      return
    spec = head.create_estimator_spec(
        features=features,
        mode=model_fn.ModeKeys.PREDICT,
        logits=logits)
    # Assert predictions and export_outputs.
    with self.cached_session() as sess:
      test_lib._initialize_variables(self, spec.scaffold)
      self.assertIsNone(spec.scaffold.summary_op)
      predictions = sess.run(spec.predictions)
      self.assertAllClose(logits,
                          predictions[prediction_keys.PredictionKeys.LOGITS])
      self.assertAllClose(
          expected_probabilities,
          predictions[prediction_keys.PredictionKeys.PROBABILITIES])
  def test_eval_create_loss(self):
    """Tests head.loss for eval mode."""
    n_classes = 2
    head = head_lib.MultiLabelHead(n_classes)
    logits = np.array([[-1., 1.], [-1.5, 1.]], dtype=np.float32)
    labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
    features = {'x': np.array(((42,),), dtype=np.int32)}
    # loss = (labels * -log(sigmoid(logits)) +
    #         (1 - labels) * -log(1 - sigmoid(logits))) / 2
    # The 0.5 factor divides the summed per-example loss by batch_size (2).
    expected_training_loss = 0.5 * np.sum(
        _sigmoid_cross_entropy(labels=labels, logits=logits))
    actual_training_loss = head.loss(
        logits=logits,
        labels=labels,
        features=features,
        mode=model_fn.ModeKeys.EVAL)
    self.assertAllClose(expected_training_loss,
                        self.evaluate(actual_training_loss))
  def test_eval_create_loss_large_logits(self):
    """Tests head.loss for eval mode and large logits."""
    n_classes = 2
    head = head_lib.MultiLabelHead(n_classes)
    logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
    labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
    features = {'x': np.array(((42,),), dtype=np.int32)}
    # loss = labels * -log(sigmoid(logits)) +
    #        (1 - labels) * -log(1 - sigmoid(logits))
    # For large logits, this is approximated as:
    # loss = labels * (logits < 0) * (-logits) +
    #        (1 - labels) * (logits > 0) * logits
    expected_training_loss = 0.5 * np.sum(
        np.array([[(10. + 10.) / 2.], [(15. + 0.) / 2.]], dtype=np.float32))
    actual_training_loss = head.loss(
        logits=logits,
        labels=labels,
        features=features,
        mode=model_fn.ModeKeys.EVAL)
    # atol absorbs the small error of the large-logit approximation above.
    self.assertAllClose(expected_training_loss,
                        self.evaluate(actual_training_loss), atol=1e-4)
  def test_eval_create_loss_labels_wrong_shape(self):
    """Tests head.loss for eval mode when labels has the wrong shape."""
    n_classes = 2
    head = head_lib.MultiLabelHead(n_classes)
    logits = np.array([[-1., 1.], [-1.5, 1.]], dtype=np.float32)
    labels_2x1 = np.array([[1], [1]], dtype=np.int64)  # wrong last dim
    labels_2 = np.array([1, 1], dtype=np.int64)  # wrong rank
    features = {'x': np.array(((42,),), dtype=np.int32)}
    if context.executing_eagerly():
      # Eager mode: the shape error surfaces immediately as a ValueError.
      with self.assertRaisesRegexp(ValueError, 'Expected labels dimension=2'):
        head.loss(logits=logits, labels=labels_2x1, features=features,
                  mode=model_fn.ModeKeys.EVAL)
      with self.assertRaisesRegexp(ValueError, 'Expected labels dimension=2'):
        head.loss(logits=logits, labels=labels_2, features=features,
                  mode=model_fn.ModeKeys.EVAL)
    else:
      # Graph mode: the error is deferred until the placeholder is fed.
      labels_placeholder = array_ops.placeholder(dtype=dtypes.int64)
      actual_training_loss = head.loss(
          logits=logits, labels=labels_placeholder, features=features,
          mode=model_fn.ModeKeys.EVAL)
      with self.cached_session():
        test_lib._initialize_variables(self, monitored_session.Scaffold())
        with self.assertRaisesRegexp(
            errors.InvalidArgumentError,
            r'\[expected_labels_shape: \] \[2 2\] \[labels_shape: \] \[2 1\]'):
          actual_training_loss.eval({
              labels_placeholder: labels_2x1
          })
        with self.assertRaisesRegexp(
            errors.InvalidArgumentError,
            r'labels shape must be \[D0, D1, ... DN, 2\]\..*'
            r'\[Received shape: \] \[2\]'):
          actual_training_loss.eval({
              labels_placeholder: labels_2
          })
  def test_eval_create_loss_loss_fn(self):
    """Tests head.loss for eval mode and custom loss_fn."""
    loss = np.array([[1.], [2.]], dtype=np.float32)
    logits_input = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
    labels_input = np.array([[1, 0], [1, 1]], dtype=np.int64)

    def _loss_fn(labels, logits):
      # The Asserts verify the head passes labels/logits through unmodified.
      check_labels = control_flow_ops.Assert(
          math_ops.reduce_all(math_ops.equal(labels, labels_input)),
          data=[labels])
      check_logits = control_flow_ops.Assert(
          math_ops.reduce_all(math_ops.equal(logits, logits_input)),
          data=[logits])
      with ops.control_dependencies([check_labels, check_logits]):
        return constant_op.constant(loss)

    head = head_lib.MultiLabelHead(n_classes=2, loss_fn=_loss_fn)
    actual_training_loss = head.loss(
        logits=logits_input,
        labels=labels_input,
        features={'x': np.array(((42,),), dtype=np.int32)},
        mode=model_fn.ModeKeys.EVAL)
    # Per-example losses are summed and divided by batch_size (2).
    self.assertAllClose(np.sum(loss) / 2., self.evaluate(actual_training_loss))
  def test_eval_create_loss_loss_fn_wrong_shape(self):
    """Tests custom loss_fn that returns Tensor of unexpected shape."""
    loss = np.array([1., 2.], dtype=np.float32)  # rank 1; head expects [2, 1]

    def _loss_fn(labels, logits):
      del labels, logits  # Unused
      return constant_op.constant(loss)

    head = head_lib.MultiLabelHead(n_classes=2, loss_fn=_loss_fn)
    logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
    labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
    features = {'x': np.array(((42,),), dtype=np.int32)}
    if context.executing_eagerly():
      # Eager mode raises immediately when head.loss is called.
      with self.assertRaisesRegexp(
          ValueError,
          'loss_shape'):
        head.loss(logits=logits, labels=labels, features=features,
                  mode=model_fn.ModeKeys.EVAL)
    else:
      # Graph mode defers the error until the loss tensor is evaluated.
      actual_training_loss = head.loss(
          logits=logits, labels=labels, features=features,
          mode=model_fn.ModeKeys.EVAL)
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          r'\[loss_fn must return Tensor of shape \[D0, D1, ... DN, 1\]\. \] '
          r'\[logits_shape: \] \[2 2\] \[loss_shape: \] \[2\]'):
        self.evaluate(actual_training_loss)
  def test_eval_labels_none(self):
    """Tests that error is raised when labels is None."""
    head = head_lib.MultiLabelHead(n_classes=2)
    with self.assertRaisesRegexp(
        ValueError, r'You must provide a labels Tensor\. Given: None\.'):
      head.loss(
          logits=np.array([[-10., 10.], [-15., 10.]], dtype=np.float32),
          labels=None,
          features={'x': np.array(((42,),), dtype=np.int32)},
          mode=model_fn.ModeKeys.EVAL)
  def _test_eval(
      self, head, logits, labels, expected_loss, expected_metrics,
      features=None, regularization_losses=None):
    """Shared helper: checks eval-mode loss and metrics in eager or graph mode.

    In eager mode it calls head.loss/metrics/update_metrics directly; in graph
    mode it builds an EstimatorSpec and runs the metric value/update ops.
    """
    tol = 1e-3
    if context.executing_eagerly():
      loss = head.loss(
          logits, labels, features=features or {}, mode=model_fn.ModeKeys.EVAL,
          regularization_losses=regularization_losses)
      self.assertIsNotNone(loss)
      self.assertAllClose(expected_loss, loss, rtol=tol, atol=tol)
      eval_metrics = head.metrics(regularization_losses=regularization_losses)
      updated_metrics = head.update_metrics(
          eval_metrics, features or {}, logits, labels,
          regularization_losses=regularization_losses)
      self.assertItemsEqual(expected_metrics.keys(), updated_metrics.keys())
      self.assertAllClose(
          expected_metrics,
          {k: updated_metrics[k].result() for k in updated_metrics},
          rtol=tol,
          atol=tol)
      return
    spec = head.create_estimator_spec(
        features=features or {},
        mode=model_fn.ModeKeys.EVAL,
        logits=logits,
        labels=labels,
        regularization_losses=regularization_losses)
    # Assert spec contains expected tensors.
    self.assertIsNotNone(spec.loss)
    self.assertItemsEqual(expected_metrics.keys(), spec.eval_metric_ops.keys())
    self.assertIsNone(spec.train_op)
    self.assertIsNone(spec.export_outputs)
    test_lib._assert_no_hooks(self, spec)
    # Assert predictions, loss, and metrics.
    with self.cached_session() as sess:
      test_lib._initialize_variables(self, spec.scaffold)
      self.assertIsNone(spec.scaffold.summary_op)
      value_ops = {k: spec.eval_metric_ops[k][0] for k in spec.eval_metric_ops}
      update_ops = {k: spec.eval_metric_ops[k][1] for k in spec.eval_metric_ops}
      loss, _ = sess.run((spec.loss, update_ops))
      self.assertAllClose(expected_loss, loss, rtol=tol, atol=tol)
      # Check results of value ops (in `metrics`).
      self.assertAllClose(
          expected_metrics, {k: value_ops[k].eval() for k in value_ops},
          rtol=tol,
          atol=tol)
  def test_eval(self):
    """Tests eval loss and default metrics with dense multi-hot labels."""
    n_classes = 2
    head = head_lib.MultiLabelHead(n_classes)
    logits = np.array([[-1., 1.], [-1.5, 1.5]], dtype=np.float32)
    labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
    # loss = labels * -log(sigmoid(logits)) +
    #        (1 - labels) * -log(1 - sigmoid(logits))
    # Sum over examples, divide by batch_size.
    expected_loss = 0.5 * np.sum(
        _sigmoid_cross_entropy(labels=labels, logits=logits))
    keys = metric_keys.MetricKeys
    expected_metrics = {
        # Average loss over examples.
        keys.LOSS_MEAN: expected_loss,
        # auc and auc_pr cannot be reliably calculated for only 4 samples, but
        # this assert tests that the algorithm remains consistent.
        keys.AUC: 0.3333,
        keys.AUC_PR: 0.7689,
    }
    self._test_eval(
        head=head,
        logits=logits,
        labels=labels,
        expected_loss=expected_loss,
        expected_metrics=expected_metrics)
  def test_eval_sparse_labels(self):
    """Tests that SparseTensor labels give the same result as multi-hot."""
    n_classes = 2
    head = head_lib.MultiLabelHead(n_classes)
    logits = np.array([[-1., 1.], [-1.5, 1.5]], dtype=np.float32)
    # Equivalent to multi_hot = [[1, 0], [1, 1]]
    labels = sparse_tensor.SparseTensor(
        values=[0, 0, 1],
        indices=[[0, 0], [1, 0], [1, 1]],
        dense_shape=[2, 2])
    labels_multi_hot = np.array([[1, 0], [1, 1]], dtype=np.int64)
    # loss = labels * -log(sigmoid(logits)) +
    #        (1 - labels) * -log(1 - sigmoid(logits))
    # Sum over examples, divide by batch_size.
    expected_loss = 0.5 * np.sum(
        _sigmoid_cross_entropy(labels=labels_multi_hot, logits=logits))
    keys = metric_keys.MetricKeys
    expected_metrics = {
        # Average loss over examples.
        keys.LOSS_MEAN: expected_loss,
        # auc and auc_pr cannot be reliably calculated for only 4 samples, but
        # this assert tests that the algorithm remains consistent.
        keys.AUC: 0.3333,
        keys.AUC_PR: 0.7689,
    }
    self._test_eval(
        head=head,
        logits=logits,
        labels=labels,
        expected_loss=expected_loss,
        expected_metrics=expected_metrics)
  def test_eval_with_regularization_losses(self):
    """Tests that regularization losses are added to the eval loss."""
    n_classes = 2
    head = head_lib.MultiLabelHead(
        n_classes, loss_reduction=losses.Reduction.SUM_OVER_BATCH_SIZE)
    logits = np.array([[-1., 1.], [-1.5, 1.5]], dtype=np.float32)
    labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
    regularization_losses = [1.5, 0.5]
    expected_regularization_loss = 2.
    # unregularized_loss = sum(
    #     labels * -log(sigmoid(logits)) +
    #     (1 - labels) * -log(1 - sigmoid(logits))) / batch_size
    expected_unregularized_loss = np.sum(
        _sigmoid_cross_entropy(labels=labels, logits=logits)) / 2.
    expected_regularized_loss = (
        expected_unregularized_loss + expected_regularization_loss)
    keys = metric_keys.MetricKeys
    expected_metrics = {
        # LOSS_MEAN reports the loss without the regularization term.
        keys.LOSS_MEAN: expected_unregularized_loss,
        keys.LOSS_REGULARIZATION: expected_regularization_loss,
        # auc and auc_pr cannot be reliably calculated for only 4 samples, but
        # this assert tests that the algorithm remains consistent.
        keys.AUC: 0.3333,
        keys.AUC_PR: 0.7689,
    }
    self._test_eval(
        head=head,
        logits=logits,
        labels=labels,
        expected_loss=expected_regularized_loss,
        expected_metrics=expected_metrics,
        regularization_losses=regularization_losses)
  def test_eval_with_label_vocabulary(self):
    """Tests eval with sparse string labels mapped through a vocabulary."""
    n_classes = 2
    head = head_lib.MultiLabelHead(
        n_classes, label_vocabulary=['class0', 'class1'])
    logits = np.array([[-1., 1.], [-1.5, 1.5]], dtype=np.float32)
    # Equivalent to multi_hot = [[1, 0], [1, 1]]
    labels = sparse_tensor.SparseTensor(
        values=['class0', 'class0', 'class1'],
        indices=[[0, 0], [1, 0], [1, 1]],
        dense_shape=[2, 2])
    labels_multi_hot = np.array([[1, 0], [1, 1]], dtype=np.int64)
    # loss = labels * -log(sigmoid(logits)) +
    #        (1 - labels) * -log(1 - sigmoid(logits))
    # Sum over examples, divide by batch_size.
    expected_loss = 0.5 * np.sum(
        _sigmoid_cross_entropy(labels=labels_multi_hot, logits=logits))
    keys = metric_keys.MetricKeys
    expected_metrics = {
        # Average loss over examples.
        keys.LOSS_MEAN: expected_loss,
        # auc and auc_pr cannot be reliably calculated for only 4 samples, but
        # this assert tests that the algorithm remains consistent.
        keys.AUC: 0.3333,
        keys.AUC_PR: 0.7689,
    }
    self._test_eval(
        head=head,
        logits=logits,
        labels=labels,
        expected_loss=expected_loss,
        expected_metrics=expected_metrics)
  def test_eval_with_label_vocabulary_with_multi_hot_input(self):
    """Tests that a vocabulary head still accepts dense multi-hot labels."""
    n_classes = 2
    head = head_lib.MultiLabelHead(
        n_classes, label_vocabulary=['class0', 'class1'])
    logits = np.array([[-1., 1.], [-1.5, 1.5]], dtype=np.float32)
    labels_multi_hot = np.array([[1, 0], [1, 1]], dtype=np.int64)
    # loss = labels * -log(sigmoid(logits)) +
    #        (1 - labels) * -log(1 - sigmoid(logits))
    # Sum over examples, divide by batch_size.
    expected_loss = 0.5 * np.sum(
        _sigmoid_cross_entropy(labels=labels_multi_hot, logits=logits))
    keys = metric_keys.MetricKeys
    expected_metrics = {
        # Average loss over examples.
        keys.LOSS_MEAN: expected_loss,
        # auc and auc_pr cannot be reliably calculated for only 4 samples, but
        # this assert tests that the algorithm remains consistent.
        keys.AUC: 0.3333,
        keys.AUC_PR: 0.7689,
    }
    self._test_eval(
        head=head,
        logits=logits,
        labels=labels_multi_hot,
        expected_loss=expected_loss,
        expected_metrics=expected_metrics)
  def test_eval_with_thresholds(self):
    """Tests per-threshold accuracy/precision/recall metrics."""
    n_classes = 2
    thresholds = [0.25, 0.5, 0.75]
    head = head_lib.MultiLabelHead(n_classes, thresholds=thresholds)
    logits = np.array([[-1., 1.], [-1.5, 1.5]], dtype=np.float32)
    labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
    # loss = labels * -log(sigmoid(logits)) +
    #        (1 - labels) * -log(1 - sigmoid(logits))
    # Sum over examples, divide by batch_size.
    expected_loss = 0.5 * np.sum(
        _sigmoid_cross_entropy(labels=labels, logits=logits))
    keys = metric_keys.MetricKeys
    expected_metrics = {
        # Average loss over examples.
        keys.LOSS_MEAN: expected_loss,
        # auc and auc_pr cannot be reliably calculated for only 4 samples, but
        # this assert tests that the algorithm remains consistent.
        keys.AUC: 0.3333,
        keys.AUC_PR: 0.7689,
        # Each threshold adds its own accuracy/precision/recall triple,
        # computed over all 4 (example, class) predictions.
        keys.ACCURACY_AT_THRESHOLD % thresholds[0]: 2. / 4.,
        keys.PRECISION_AT_THRESHOLD % thresholds[0]: 2. / 3.,
        keys.RECALL_AT_THRESHOLD % thresholds[0]: 2. / 3.,
        keys.ACCURACY_AT_THRESHOLD % thresholds[1]: 1. / 4.,
        keys.PRECISION_AT_THRESHOLD % thresholds[1]: 1. / 2.,
        keys.RECALL_AT_THRESHOLD % thresholds[1]: 1. / 3.,
        keys.ACCURACY_AT_THRESHOLD % thresholds[2]: 2. / 4.,
        keys.PRECISION_AT_THRESHOLD % thresholds[2]: 1. / 1.,
        keys.RECALL_AT_THRESHOLD % thresholds[2]: 1. / 3.,
    }
    self._test_eval(
        head=head,
        logits=logits,
        labels=labels,
        expected_loss=expected_loss,
        expected_metrics=expected_metrics)
  def test_eval_with_classes_for_class_based_metrics(self):
    """Tests per-class metrics selected by integer class index."""
    head = head_lib.MultiLabelHead(
        n_classes=2, classes_for_class_based_metrics=[0, 1])
    logits = np.array([[-1., 1.], [-1.5, 1.5]], dtype=np.float32)
    labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
    # loss = labels * -log(sigmoid(logits)) +
    #        (1 - labels) * -log(1 - sigmoid(logits))
    # Sum over examples, divide by batch_size.
    expected_loss = 0.5 * np.sum(
        _sigmoid_cross_entropy(labels=labels, logits=logits))
    keys = metric_keys.MetricKeys
    expected_metrics = {
        # Average loss over examples.
        keys.LOSS_MEAN: expected_loss,
        # auc and auc_pr cannot be reliably calculated for only 4 samples, but
        # this assert tests that the algorithm remains consistent.
        keys.AUC: 0.3333,
        keys.AUC_PR: 0.7689,
        # Per-class probability mean is the column-wise mean of sigmoid(logits).
        keys.PROBABILITY_MEAN_AT_CLASS % 0:
            math_ops.reduce_sum(nn.sigmoid(logits[:, 0])) / 2.,
        keys.AUC_AT_CLASS % 0: 0.,
        keys.AUC_PR_AT_CLASS % 0: 1.,
        keys.PROBABILITY_MEAN_AT_CLASS % 1:
            math_ops.reduce_sum(nn.sigmoid(logits[:, 1])) / 2.,
        keys.AUC_AT_CLASS % 1: 1.,
        keys.AUC_PR_AT_CLASS % 1: 1.,
    }
    self._test_eval(
        head=head,
        logits=logits,
        labels=labels,
        expected_loss=expected_loss,
        expected_metrics=expected_metrics)
  def test_eval_with_classes_for_class_based_metrics_string(self):
    """Tests per-class metrics selected by vocabulary name."""
    head = head_lib.MultiLabelHead(
        n_classes=2, label_vocabulary=['a', 'b'],
        classes_for_class_based_metrics=['a', 'b'])
    logits = np.array([[-1., 1.], [-1.5, 1.5]], dtype=np.float32)
    # Sparse string labels; equivalent to multi_hot = [[1, 0], [1, 1]].
    labels = sparse_tensor.SparseTensor(
        values=['a', 'a', 'b'],
        indices=[[0, 0], [1, 0], [1, 1]],
        dense_shape=[2, 2])
    labels_onehot = np.array([[1, 0], [1, 1]], dtype=np.int64)
    # loss = labels * -log(sigmoid(logits)) +
    #        (1 - labels) * -log(1 - sigmoid(logits))
    # Sum over examples, divide by batch_size.
    expected_loss = 0.5 * np.sum(
        _sigmoid_cross_entropy(labels=labels_onehot, logits=logits))
    keys = metric_keys.MetricKeys
    expected_metrics = {
        # Average loss over examples.
        keys.LOSS_MEAN: expected_loss,
        # auc and auc_pr cannot be reliably calculated for only 4 samples, but
        # this assert tests that the algorithm remains consistent.
        keys.AUC: 0.3333,
        keys.AUC_PR: 0.7689,
        # Per-class metrics are keyed by vocabulary name instead of index.
        keys.PROBABILITY_MEAN_AT_NAME % 'a':
            math_ops.reduce_sum(nn.sigmoid(logits[:, 0])) / 2.,
        keys.AUC_AT_NAME % 'a': 0.,
        keys.AUC_PR_AT_NAME % 'a': 1.,
        keys.PROBABILITY_MEAN_AT_NAME % 'b':
            math_ops.reduce_sum(nn.sigmoid(logits[:, 1])) / 2.,
        keys.AUC_AT_NAME % 'b': 1.,
        keys.AUC_PR_AT_NAME % 'b': 1.,
    }
    self._test_eval(
        head=head,
        logits=logits,
        labels=labels,
        expected_loss=expected_loss,
        expected_metrics=expected_metrics)
  def test_eval_with_weights(self):
    """Tests eval loss and metrics with a per-example weight column."""
    n_classes = 2
    head = head_lib.MultiLabelHead(n_classes, weight_column='example_weights')
    logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
    labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
    features = {
        'x': np.array([[41], [42]], dtype=np.int32),
        'example_weights': np.array([[1.], [2.]], dtype=np.float32),
    }
    # For large logits, sigmoid cross entropy loss is approximated as:
    # loss = labels * (logits < 0) * (-logits) +
    #        (1 - labels) * (logits > 0) * logits =>
    # expected_unweighted_loss = [[10., 10.], [15., 0.]]
    # Average over classes, weighted sum over examples, divide by batch_size.
    # loss = (1 * (10 + 10) / 2 + 2 * (15 + 0) / 2) / 2
    expected_loss = 12.5
    keys = metric_keys.MetricKeys
    expected_metrics = {
        # Average loss over weighted examples (denominator is sum(weights)).
        keys.LOSS_MEAN: expected_loss * (2. / 3.),
        # auc and auc_pr cannot be reliably calculated for only 4 samples, but
        # this assert tests that the algorithm remains consistent.
        keys.AUC: 0.2000,
        keys.AUC_PR: 0.7280,
    }
    self._test_eval(
        head=head,
        logits=logits,
        labels=labels,
        expected_loss=expected_loss,
        expected_metrics=expected_metrics,
        features=features)
def test_train_create_loss_large_logits(self):
"""Tests head.create_loss for train mode and large logits."""
n_classes = 2
head = head_lib.MultiLabelHead(n_classes, weight_column='example_weights')
logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
weights = np.array([[1.], [2.]], dtype=np.float32)
features = {
'x': np.array(((42,),), dtype=np.int32),
'example_weights': weights
}
# loss = labels * -log(sigmoid(logits)) +
# (1 - labels) * -log(1 - sigmoid(logits))
# For large logits, this is approximated as:
# loss = labels * (logits < 0) * (-logits) +
# (1 - labels) * (logits > 0) * logits
# expected_unreduced_loss = [[(10. + 10.) / 2.], [(15. + 0.) / 2.]]
# expected_weights = [[1.], [2.]]
expected_training_loss = (1. * (10. + 10.) / 2. + 2. * (15. + 0.) / 2.) / 2.
training_loss = head.loss(
logits=logits,
labels=labels,
features=features,
mode=model_fn.ModeKeys.TRAIN)
self.assertAllClose(
expected_training_loss, self.evaluate(training_loss), atol=1e-4)
def test_train_create_loss_loss_reduction(self):
"""Tests head.create_loss with loss_reduction."""
n_classes = 2
head = head_lib.MultiLabelHead(
n_classes, weight_column='example_weights',
loss_reduction=losses.Reduction.SUM_BY_NONZERO_WEIGHTS)
logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
weights = np.array([[1.], [2.]], dtype=np.float32)
# loss = labels * -log(sigmoid(logits)) +
# (1 - labels) * -log(1 - sigmoid(logits))
# For large logits, this is approximated as:
# loss = labels * (logits < 0) * (-logits) +
# (1 - labels) * (logits > 0) * logits
# expected_unreduced_loss = [[(10. + 10.) / 2.], [(15. + 0.) / 2.]]
# expected_weights = [[1.], [2.]]
expected_training_loss = (1. * (10. + 10.) / 2. + 2. * (15. + 0.) / 2.) / 2.
training_loss = head.loss(
logits=logits,
labels=labels,
features={
'x': np.array(((42,),), dtype=np.int32),
'example_weights': weights
},
mode=model_fn.ModeKeys.TRAIN)
self.assertAllClose(
expected_training_loss, self.evaluate(training_loss), atol=1e-4)
def test_train_labels_none(self):
"""Tests that error is raised when labels is None."""
head = head_lib.MultiLabelHead(n_classes=2)
with self.assertRaisesRegexp(
ValueError, r'You must provide a labels Tensor\. Given: None\.'):
head.loss(
logits=np.array([[-10., 10.], [-15., 10.]], dtype=np.float32),
labels=None,
features={'x': np.array(((42,),), dtype=np.int32)},
mode=model_fn.ModeKeys.TRAIN)
  def test_train_invalid_indicator_labels(self):
    """Tests that indicator labels outside [0, 1] raise an error."""
    head = head_lib.MultiLabelHead(n_classes=2)
    logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
    # The value 2 is outside the allowed range.
    labels = np.array([[2, 0], [1, 1]], dtype=np.int64)
    if context.executing_eagerly():
      # Eager mode surfaces the validation failure immediately as ValueError.
      with self.assertRaisesRegexp(
          ValueError,
          r'labels must be an integer indicator Tensor with values in '
          r'\[0, 1\]'):
        head.loss(
            logits=logits,
            labels=labels,
            features={},
            mode=model_fn.ModeKeys.TRAIN)
      return
    def _train_op_fn(loss):
      # Train op is irrelevant here; the loss computation itself must fail.
      del loss
      return control_flow_ops.no_op()
    spec = head.create_estimator_spec(
        features={},
        mode=model_fn.ModeKeys.TRAIN,
        logits=logits,
        labels=labels,
        train_op_fn=_train_op_fn)
    with self.cached_session() as sess:
      test_lib._initialize_variables(self, spec.scaffold)
      # In graph mode the validation only fires when the loss tensor is
      # evaluated, raising InvalidArgumentError at run time.
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          r'labels must be an integer indicator Tensor with values in '
          r'\[0, 1\]'):
        sess.run(spec.loss)
  def test_train_invalid_sparse_labels(self):
    """Tests that sparse labels outside [0, n_classes) raise an error."""
    head = head_lib.MultiLabelHead(n_classes=2)
    logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
    # The value 2 is outside the allowed range.
    labels = sparse_tensor.SparseTensor(
        values=[2, 0, 1],
        indices=[[0, 0], [1, 0], [1, 1]],
        dense_shape=[2, 2])
    if context.executing_eagerly():
      # Eager mode surfaces the validation failure immediately as ValueError.
      with self.assertRaisesRegexp(
          ValueError,
          r'labels must be an integer SparseTensor with values in \[0, 2\)'):
        head.loss(
            logits=logits,
            labels=labels,
            features={},
            mode=model_fn.ModeKeys.TRAIN)
      return
    def _train_op_fn(loss):
      # Train op is irrelevant here; the loss computation itself must fail.
      del loss
      return control_flow_ops.no_op()
    spec = head.create_estimator_spec(
        features={},
        mode=model_fn.ModeKeys.TRAIN,
        logits=logits,
        labels=labels,
        train_op_fn=_train_op_fn)
    with self.cached_session() as sess:
      test_lib._initialize_variables(self, spec.scaffold)
      # In graph mode the validation only fires when the loss tensor is
      # evaluated, raising InvalidArgumentError at run time.
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          r'labels must be an integer SparseTensor with values in \[0, 2\)'):
        sess.run(spec.loss)
  def _test_train(self, head, logits, labels, expected_loss):
    """Shared helper: asserts train loss, and in graph mode also the
    estimator spec's train_op and summaries."""
    tol = 1e-3
    features = {'x': np.array(((42,),), dtype=np.int32)}
    if context.executing_eagerly():
      # Eager mode: only the loss value itself can be checked.
      loss = head.loss(
          logits=logits,
          labels=labels,
          features=features,
          mode=model_fn.ModeKeys.TRAIN)
      self.assertIsNotNone(loss)
      self.assertAllClose(expected_loss, loss, rtol=tol, atol=tol)
      return
    expected_train_result = 'my_train_op'
    def _train_op_fn(loss):
      # Encodes the received loss into the train op's string result so the
      # test can verify that train_op_fn was called with the computed loss.
      return string_ops.string_join(
          [constant_op.constant(expected_train_result),
           string_ops.as_string(loss, precision=3)])
    spec = head.create_estimator_spec(
        features=features,
        mode=model_fn.ModeKeys.TRAIN,
        logits=logits,
        labels=labels,
        train_op_fn=_train_op_fn)
    self.assertIsNotNone(spec.loss)
    self.assertEqual({}, spec.eval_metric_ops)
    self.assertIsNotNone(spec.train_op)
    self.assertIsNone(spec.export_outputs)
    test_lib._assert_no_hooks(self, spec)
    # Assert predictions, loss, train_op, and summaries.
    with self.cached_session() as sess:
      test_lib._initialize_variables(self, spec.scaffold)
      self.assertIsNotNone(spec.scaffold.summary_op)
      loss, train_result, summary_str = sess.run((spec.loss, spec.train_op,
                                                  spec.scaffold.summary_op))
      self.assertAllClose(expected_loss, loss, rtol=tol, atol=tol)
      self.assertEqual(
          six.b('{0:s}{1:.3f}'.format(expected_train_result, expected_loss)),
          train_result)
      test_lib._assert_simple_summaries(
          self, {metric_keys.MetricKeys.LOSS: expected_loss}, summary_str, tol)
def test_train(self):
head = head_lib.MultiLabelHead(n_classes=2)
logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
# For large logits, sigmoid cross entropy loss is approximated as:
# loss = labels * (logits < 0) * (-logits) +
# (1 - labels) * (logits > 0) * logits =>
# expected_unweighted_loss = [[10., 10.], [15., 0.]]
# Average over classes, sum over examples, divide by batch_size.
# loss = ((10 + 10) / 2 + (15 + 0) / 2 ) / 2
expected_loss = 8.75
self._test_train(
head=head, logits=logits, labels=labels, expected_loss=expected_loss)
def test_train_sparse_labels(self):
head = head_lib.MultiLabelHead(n_classes=2)
logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
# Equivalent to multi_hot = [[1, 0], [1, 1]]
labels = sparse_tensor.SparseTensor(
values=[0, 0, 1],
indices=[[0, 0], [1, 0], [1, 1]],
dense_shape=[2, 2])
# For large logits, sigmoid cross entropy loss is approximated as:
# loss = labels * (logits < 0) * (-logits) +
# (1 - labels) * (logits > 0) * logits =>
# expected_unweighted_loss = [[10., 10.], [15., 0.]]
# Average over classes, sum over examples, divide by batch_size.
# loss = ((10 + 10) / 2 + (15 + 0) / 2 ) / 2
expected_loss = 8.75
self._test_train(
head=head, logits=logits, labels=labels, expected_loss=expected_loss)
def test_train_with_label_vocabulary(self):
head = head_lib.MultiLabelHead(
n_classes=2, label_vocabulary=['class0', 'class1'])
logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
# Equivalent to multi_hot = [[1, 0], [1, 1]]
labels = sparse_tensor.SparseTensor(
values=['class0', 'class0', 'class1'],
indices=[[0, 0], [1, 0], [1, 1]],
dense_shape=[2, 2])
# For large logits, sigmoid cross entropy loss is approximated as:
# loss = labels * (logits < 0) * (-logits) +
# (1 - labels) * (logits > 0) * logits =>
# expected_unweighted_loss = [[10., 10.], [15., 0.]]
# Average over classes, sum over examples, divide by batch_size.
# loss = ((10 + 10) / 2 + (15 + 0) / 2 ) / 2
expected_loss = 8.75
self._test_train(
head=head, logits=logits, labels=labels, expected_loss=expected_loss)
  def test_train_with_optimizer(self):
    """Tests train mode with an optimizer object instead of train_op_fn."""
    head = head_lib.MultiLabelHead(n_classes=2)
    logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
    labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
    features = {'x': np.array(((42,),), dtype=np.int32)}
    # For large logits, sigmoid cross entropy loss is approximated as:
    # loss = labels * (logits < 0) * (-logits) +
    #        (1 - labels) * (logits > 0) * logits =>
    # expected_unweighted_loss = [[10., 10.], [15., 0.]]
    # Average over classes, sum over examples, divide by batch_size.
    # loss = ((10 + 10) / 2 + (15 + 0) / 2 ) / 2
    expected_loss = 8.75
    tol = 1e-3
    loss = head.loss(
        logits=logits,
        labels=labels,
        features=features,
        mode=model_fn.ModeKeys.TRAIN)
    self.assertIsNotNone(loss)
    self.assertAllClose(expected_loss, self.evaluate(loss), rtol=tol, atol=tol)
    if context.executing_eagerly():
      # The rest of the test exercises graph-mode create_estimator_spec.
      return
    expected_train_result = 'my_train_op'
    class _Optimizer(object):
      # Minimal optimizer stub whose minimize() encodes the received loss
      # into a string so the test can verify it was called with spec.loss.
      def minimize(self, loss, global_step):
        del global_step
        return string_ops.string_join(
            [constant_op.constant(expected_train_result),
             string_ops.as_string(loss, precision=3)])
    spec = head.create_estimator_spec(
        features=features,
        mode=model_fn.ModeKeys.TRAIN,
        logits=logits,
        labels=labels,
        optimizer=_Optimizer())
    with self.cached_session() as sess:
      test_lib._initialize_variables(self, spec.scaffold)
      loss, train_result = sess.run((spec.loss, spec.train_op))
      self.assertAllClose(expected_loss, loss, rtol=tol, atol=tol)
      self.assertEqual(
          six.b('{0:s}{1:.3f}'.format(expected_train_result, expected_loss)),
          train_result)
  def test_train_with_regularization_losses(self):
    """Tests that regularization losses are added to the training loss
    and reported in the summaries."""
    head = head_lib.MultiLabelHead(
        n_classes=2, loss_reduction=losses.Reduction.SUM_OVER_BATCH_SIZE)
    logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
    labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
    regularization_losses = [1.5, 0.5]
    features = {'x': np.array(((42,),), dtype=np.int32)}
    # For large logits, sigmoid cross entropy loss is approximated as:
    # loss = labels * (logits < 0) * (-logits) +
    #        (1 - labels) * (logits > 0) * logits =>
    # expected_unweighted_loss = [[10., 10.], [15., 0.]]
    # Average over classes and over batch and add regularization loss.
    # 35/4 is the data loss; 1.5 + 0.5 = 2 is the regularization total.
    expected_loss = 35. / 4. + 2.
    expected_summaries = {
        metric_keys.MetricKeys.LOSS: expected_loss,
        metric_keys.MetricKeys.LOSS_REGULARIZATION: 2.,
    }
    tol = 1e-3
    loss = head.loss(
        logits=logits,
        labels=labels,
        features=features,
        mode=model_fn.ModeKeys.TRAIN,
        regularization_losses=regularization_losses)
    self.assertIsNotNone(loss)
    self.assertAllClose(expected_loss, self.evaluate(loss), rtol=tol, atol=tol)
    if context.executing_eagerly():
      # The rest of the test exercises graph-mode create_estimator_spec.
      return
    expected_train_result = 'my_train_op'
    def _train_op_fn(loss):
      # Encodes the received loss into the train op's string result for
      # verification below.
      return string_ops.string_join(
          [constant_op.constant(expected_train_result),
           string_ops.as_string(loss, precision=3)])
    spec = head.create_estimator_spec(
        features=features,
        mode=model_fn.ModeKeys.TRAIN,
        logits=logits,
        labels=labels,
        train_op_fn=_train_op_fn,
        regularization_losses=regularization_losses)
    # Assert predictions, loss, train_op, and summaries.
    with self.cached_session() as sess:
      test_lib._initialize_variables(self, spec.scaffold)
      self.assertIsNotNone(spec.scaffold.summary_op)
      loss, train_result, summary_str = sess.run((spec.loss, spec.train_op,
                                                  spec.scaffold.summary_op))
      self.assertAllClose(expected_loss, loss, rtol=tol, atol=tol)
      self.assertEqual(
          six.b('{0:s}{1:.3f}'.format(expected_train_result, expected_loss)),
          train_result)
      test_lib._assert_simple_summaries(
          self, expected_summaries, summary_str, tol)
  def test_train_with_weights(self):
    """Tests train mode with a weight column: loss, train_op and summaries."""
    n_classes = 2
    head = head_lib.MultiLabelHead(n_classes, weight_column='example_weights')
    logits = np.array([[-10., 10.], [-15., 10.]], dtype=np.float32)
    labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
    features = {
        'x': np.array([[41], [42]], dtype=np.int32),
        'example_weights': np.array([[1.], [2.]], dtype=np.float32),
    }
    # For large logits, sigmoid cross entropy loss is approximated as:
    # loss = labels * (logits < 0) * (-logits) +
    #        (1 - labels) * (logits > 0) * logits =>
    # expected_unweighted_loss = [[10., 10.], [15., 0.]]
    # Average over classes, weighted sum over examples, divide by batch_size.
    # loss = (1 * (10 + 10) / 2 + 2 * (15 + 0) / 2) / 2
    expected_loss = 12.5
    tol = 1e-3
    loss = head.loss(
        logits=logits,
        labels=labels,
        features=features,
        mode=model_fn.ModeKeys.TRAIN)
    self.assertIsNotNone(loss)
    self.assertAllClose(expected_loss, self.evaluate(loss), rtol=tol, atol=tol)
    if context.executing_eagerly():
      # The rest of the test exercises graph-mode create_estimator_spec.
      return
    expected_train_result = 'my_train_op'
    def _train_op_fn(loss):
      # Encodes the received loss into the train op's string result for
      # verification below.
      return string_ops.string_join(
          [constant_op.constant(expected_train_result),
           string_ops.as_string(loss, precision=3)])
    spec = head.create_estimator_spec(
        features=features,
        mode=model_fn.ModeKeys.TRAIN,
        logits=logits,
        labels=labels,
        train_op_fn=_train_op_fn)
    self.assertIsNotNone(spec.loss)
    self.assertEqual({}, spec.eval_metric_ops)
    self.assertIsNotNone(spec.train_op)
    self.assertIsNone(spec.export_outputs)
    test_lib._assert_no_hooks(self, spec)
    # Assert predictions, loss, train_op, and summaries.
    with self.cached_session() as sess:
      test_lib._initialize_variables(self, spec.scaffold)
      self.assertIsNotNone(spec.scaffold.summary_op)
      loss, train_result, summary_str = sess.run((spec.loss, spec.train_op,
                                                  spec.scaffold.summary_op))
      self.assertAllClose(expected_loss, loss, rtol=tol, atol=tol)
      self.assertEqual(
          six.b('{0:s}{1:.3f}'.format(expected_train_result, expected_loss)),
          train_result)
      test_lib._assert_simple_summaries(
          self, {metric_keys.MetricKeys.LOSS: expected_loss,}, summary_str, tol)
def test_multi_dim_weighted_train_create_loss(self):
"""Logits and labels of shape [2, 2, 3], weights [2, 2]."""
head = head_lib.MultiLabelHead(n_classes=3, weight_column='weights')
logits = np.array([[[-10., 10., -10.], [10., -10., 10.]],
[[-12., 12., -12.], [12., -12., 12.]]], dtype=np.float32)
labels = np.array([[[1, 0, 0], [1, 0, 0]],
[[0, 1, 1], [0, 1, 1]]], dtype=np.int64)
weights = np.array([[1., 1.5], [2., 2.5]], dtype=np.float32)
# unreduced_loss =
# [[10 + 10 + 0, 0 + 0 + 10], [0 + 0 + 12, 12 + 12 + 0]] / 3
# = [[20/3, 10/3], [4, 8]]
# expected_unreduced_loss = [[[20./3.], [10./3.]], [[4.], [8.]]]
# weights are reshaped to [2, 2, 1] to match logits.
# expected_weights = [[[1.], [1.5]], [[2.], [2.5]]]
# loss = (1*20/3 + 1.5*10/3 + 2*4 + 2.5*8) / 4 = 9.9167
expected_training_loss = 9.9167
training_loss = head.loss(
logits=logits,
labels=labels,
features={'weights': weights},
mode=model_fn.ModeKeys.TRAIN)
atol = 1.e-3
self.assertAllClose(
expected_training_loss, self.evaluate(training_loss), atol=atol)
  def test_multi_dim_weighted_train(self):
    """Logits and labels of shape [2, 2, 3], weights [2, 2]."""
    head = head_lib.MultiLabelHead(n_classes=3, weight_column='weights')
    logits = np.array([[[-10., 10., -10.], [10., -10., 10.]],
                       [[-12., 12., -12.], [12., -12., 12.]]], dtype=np.float32)
    labels = np.array([[[1, 0, 0], [1, 0, 0]],
                       [[0, 1, 1], [0, 1, 1]]], dtype=np.int64)
    weights = np.array([[1., 1.5], [2., 2.5]], dtype=np.float32)
    # loss = [[10 + 10 + 0, 0 + 0 + 10], [0 + 0 + 12, 12 + 12 + 0]] / 3
    #      = [[20/3, 10/3], [4, 8]]
    # loss = (1*20/3 + 1.5*10/3 + 2*4 + 2.5*8) / 4 = 9.9167
    expected_loss = 9.9167
    atol = 1.e-3
    loss = head.loss(
        logits=logits,
        labels=labels,
        features={'weights': weights},
        mode=model_fn.ModeKeys.TRAIN)
    self.assertIsNotNone(loss)
    self.assertAllClose(expected_loss, self.evaluate(loss), atol=atol)
    if context.executing_eagerly():
      # The rest of the test exercises graph-mode create_estimator_spec.
      return
    expected_train_result = 'my_train_op'
    def _train_op_fn(loss):
      # Encodes the received loss into the train op's string result for
      # verification below.
      return string_ops.string_join(
          [constant_op.constant(expected_train_result),
           string_ops.as_string(loss, precision=3)])
    spec = head.create_estimator_spec(
        features={'weights': weights},
        mode=model_fn.ModeKeys.TRAIN,
        logits=logits,
        labels=labels,
        train_op_fn=_train_op_fn)
    with self.cached_session() as sess:
      test_lib._initialize_variables(self, monitored_session.Scaffold())
      loss, train_result = sess.run((spec.loss, spec.train_op))
      self.assertAllClose(expected_loss, loss, atol=atol)
      self.assertEqual(
          six.b('{0:s}{1:.3f}'.format(expected_train_result, expected_loss)),
          train_result)
  def test_multi_dim_weights_wrong_inner_dim(self):
    """Logits and labels of shape [2, 2, 3], weights [2, 1]."""
    head = head_lib.MultiLabelHead(n_classes=3, weight_column='weights')
    logits = np.array([[[-10., 10., -10.], [10., -10., 10.]],
                       [[-12., 12., -12.], [12., -12., 12.]]], dtype=np.float32)
    labels = np.array([[[1, 0, 0], [1, 0, 0]],
                       [[0, 1, 1], [0, 1, 1]]], dtype=np.int64)
    # Weights of shape [2, 1] do not match the [2, 2] example dimensions of
    # the logits, so the loss computation must fail.
    weights = np.array([[1.], [2.]], dtype=np.float32)
    if context.executing_eagerly():
      with self.assertRaisesRegexp(ValueError, 'weights shape'):
        head.loss(
            logits=logits,
            labels=labels,
            features={'weights': weights},
            mode=model_fn.ModeKeys.TRAIN)
      return
    def _train_op_fn(loss):
      # Train op is irrelevant here; the loss computation itself must fail.
      del loss
      return control_flow_ops.no_op()
    spec = head.create_estimator_spec(
        features={'weights': weights},
        mode=model_fn.ModeKeys.TRAIN,
        logits=logits,
        labels=labels,
        train_op_fn=_train_op_fn)
    with self.cached_session():
      test_lib._initialize_variables(self, monitored_session.Scaffold())
      # Graph mode raises at run time when the loss tensor is evaluated.
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          r'\[logits_shape: \] \[2 2 3\] \[weights_shape: \] \[2 1\]'):
        spec.loss.eval()
  def test_multi_dim_weights_wrong_outer_dim(self):
    """Logits and labels of shape [2, 2, 3], weights [2, 2, 3]."""
    head = head_lib.MultiLabelHead(n_classes=3, weight_column='weights')
    logits = np.array([[[-10., 10., -10.], [10., -10., 10.]],
                       [[-12., 12., -12.], [12., -12., 12.]]], dtype=np.float32)
    labels = np.array([[[1, 0, 0], [1, 0, 0]],
                       [[0, 1, 1], [0, 1, 1]]], dtype=np.int64)
    # Weights of shape [2, 2, 3] have an extra trailing dimension relative to
    # the expected per-example shape, so the loss computation must fail.
    weights = np.array([[[1., 1., 1.], [1.5, 1.5, 1.5]],
                        [[2., 2., 2.], [2.5, 2.5, 2.5]]], dtype=np.float32)
    if context.executing_eagerly():
      with self.assertRaisesRegexp(ValueError, 'weights shape'):
        head.loss(
            logits=logits,
            labels=labels,
            features={'weights': weights},
            mode=model_fn.ModeKeys.TRAIN)
      return
    # Weights are fed through a placeholder so the mismatch is only
    # detectable at run time, not during static shape inference.
    weights_placeholder = array_ops.placeholder(dtype=dtypes.float32)
    def _train_op_fn(loss):
      # Train op is irrelevant here; the loss computation itself must fail.
      del loss
      return control_flow_ops.no_op()
    spec = head.create_estimator_spec(
        features={'weights': weights_placeholder},
        mode=model_fn.ModeKeys.TRAIN,
        logits=logits,
        labels=labels,
        train_op_fn=_train_op_fn)
    with self.cached_session():
      test_lib._initialize_variables(self, monitored_session.Scaffold())
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          r'\[logits_shape: \] \[2 2 3\] \[weights_shape: \] \[2 2 3\]'):
        spec.loss.eval({weights_placeholder: weights})
def test_multi_dim_weighted_eval(self):
"""Logits and labels of shape [2, 2, 3], weights [2, 2]."""
head = head_lib.MultiLabelHead(n_classes=3, weight_column='weights')
logits = np.array([[[-10., 10., -10.], [10., -10., 10.]],
[[-12., 12., -12.], [12., -12., 12.]]], dtype=np.float32)
labels = np.array([[[1, 0, 0], [1, 0, 0]],
[[0, 1, 1], [0, 1, 1]]], dtype=np.int64)
weights = np.array([[1., 1.5], [2., 2.5]], dtype=np.float32)
# loss = [[10 + 10 + 0, 0 + 0 + 10], [0 + 0 + 12, 12 + 12 + 0]] / 3
# = [[20/3, 10/3], [4, 8]]
# loss = (1*20/3 + 1.5*10/3 + 2*4 + 2.5*8) / 4 = 9.9167
expected_loss = 9.9167
keys = metric_keys.MetricKeys
expected_metrics = {
keys.LOSS_MEAN: expected_loss * (4. / np.sum(weights)),
# auc and auc_pr cannot be reliably calculated for only 4 samples, but
# this assert tests that the algorithm remains consistent.
keys.AUC: 0.4977,
keys.AUC_PR: 0.5461,
}
self._test_eval(
head=head,
features={'weights': weights},
logits=logits,
labels=labels,
expected_loss=expected_loss,
expected_metrics=expected_metrics)
@test_util.deprecated_graph_mode_only
class MultiLabelHeadForEstimator(test.TestCase):
  """Tests for create_estimator_spec running in Graph mode only."""

  def test_predict_with_label_vocabulary(self):
    """Tests that export_outputs carry the vocabulary class strings."""
    n_classes = 4
    head = head_lib.MultiLabelHead(
        n_classes, label_vocabulary=['foo', 'bar', 'foobar', 'barfoo'])
    logits = np.array(
        [[0., 1., 2., -1.], [-1., -2., -3., 1.]], dtype=np.float32)
    # Every example exports the full class list, in vocabulary order.
    expected_export_classes = [[b'foo', b'bar', b'foobar', b'barfoo']] * 2
    spec = head.create_estimator_spec(
        features={'x': np.array(((42,),), dtype=np.int32)},
        mode=model_fn.ModeKeys.PREDICT,
        logits=logits)
    with self.cached_session() as sess:
      test_lib._initialize_variables(self, spec.scaffold)
      self.assertAllEqual(
          expected_export_classes,
          sess.run(spec.export_outputs[test_lib._DEFAULT_SERVING_KEY].classes))

  def test_train_with_update_ops(self):
    """Tests that update_ops passed to the head run as part of train_op."""
    with ops.Graph().as_default():
      w = variables.Variable(1)
      update_op = w.assign_add(1)
      t = variables.Variable('')
      expected_train_result = b'my_train_op'
      def _train_op_fn(loss):
        del loss
        return t.assign(expected_train_result)
      head = head_lib.MultiLabelHead(n_classes=2, update_ops=[update_op])
      spec = head.create_estimator_spec(
          features={'x': np.array(((42,),), dtype=np.int32)},
          mode=model_fn.ModeKeys.TRAIN,
          logits=np.array([[-10., 10.], [-15., 10.]], dtype=np.float32),
          labels=np.array([[1, 0], [1, 1]], dtype=np.int64),
          train_op_fn=_train_op_fn)
      with self.cached_session() as sess:
        test_lib._initialize_variables(self, spec.scaffold)
        sess.run(spec.train_op)
        w_value, t_value = sess.run([w, t])
        # w was incremented by update_op; t was assigned by _train_op_fn.
        self.assertEqual(2, w_value)
        self.assertEqual(expected_train_result, t_value)

  def test_lookup_tables_in_graph(self):
    """Tests the head end-to-end in a DNNEstimator: train, eval, predict."""
    n_classes = 2
    head = head_lib.MultiLabelHead(
        n_classes=n_classes, label_vocabulary=['class0', 'class1'])
    feature_columns = [feature_column.numeric_column('x')]
    # Create dnn estimator.
    est = dnn.DNNEstimator(
        head=head,
        hidden_units=(2, 2),
        feature_columns=feature_columns)
    def input_fn():
      return (
          {'x': np.array(((42,), (43,),), dtype=np.int32)},
          np.array([[1, 0], [1, 1]], dtype=np.int64))
    # Train.
    num_steps = 1
    est.train(input_fn, steps=num_steps)
    # Eval.
    eval_results = est.evaluate(input_fn, steps=num_steps)
    self.assertEqual(num_steps, eval_results[ops.GraphKeys.GLOBAL_STEP])
    self.assertIn('loss', six.iterkeys(eval_results))
    # Predict.
    est.predict(input_fn)
if __name__ == '__main__':
  # Delegate to the TensorFlow test runner when invoked directly.
  test.main()
| 40.170322
| 83
| 0.631922
| 7,139
| 54,953
| 4.661717
| 0.052949
| 0.019982
| 0.03155
| 0.033053
| 0.857873
| 0.806971
| 0.778245
| 0.742127
| 0.716647
| 0.704688
| 0
| 0.040336
| 0.230797
| 54,953
| 1,367
| 84
| 40.199707
| 0.746984
| 0.155151
| 0
| 0.713483
| 0
| 0
| 0.045109
| 0.002144
| 0
| 0
| 0
| 0
| 0.098315
| 1
| 0.064607
| false
| 0
| 0.026217
| 0.004682
| 0.117978
| 0.000936
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a7616d732a095b6805fd205ad07e70ebecc4b77
| 2,578
|
py
|
Python
|
ns-allinone-3.27/ns-3.27/src/point-to-point-layout/bindings/callbacks_list.py
|
zack-braun/4607_NS
|
43c8fb772e5552fb44bd7cd34173e73e3fb66537
|
[
"MIT"
] | 93
|
2019-04-21T08:22:26.000Z
|
2022-03-30T04:26:29.000Z
|
ns-allinone-3.27/ns-3.27/src/point-to-point-layout/bindings/callbacks_list.py
|
zack-braun/4607_NS
|
43c8fb772e5552fb44bd7cd34173e73e3fb66537
|
[
"MIT"
] | 12
|
2019-04-19T16:39:58.000Z
|
2021-06-22T13:18:32.000Z
|
ns-allinone-3.27/ns-3.27/src/point-to-point-layout/bindings/callbacks_list.py
|
zack-braun/4607_NS
|
43c8fb772e5552fb44bd7cd34173e73e3fb66537
|
[
"MIT"
] | 21
|
2019-05-27T19:36:12.000Z
|
2021-07-26T02:37:41.000Z
|
# NOTE(review): this appears to be an auto-generated table of ns-3 callback
# type signatures; each row looks like [return type, arg types...] padded
# with 'ns3::empty' to a fixed arity of 10 — confirm against the bindings
# generator before editing by hand.
callback_classes = [
    ['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'std::basic_string<char>', 'ns3::Ptr<const ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::Socket>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['bool', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::Socket>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'const ns3::Ipv4Header &', 'ns3::Ptr<const ns3::Packet>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ptr<ns3::Ipv4>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'const ns3::Ipv4Header &', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ipv4L3Protocol::DropReason', 'ns3::Ptr<ns3::Ipv4>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ptr<ns3::Ipv6>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'const ns3::Ipv6Header &', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ipv6L3Protocol::DropReason', 'ns3::Ptr<ns3::Ipv6>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'const ns3::Ipv6Header &', 'ns3::Ptr<const ns3::Packet>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
| 143.222222
| 210
| 0.581071
| 348
| 2,578
| 4.298851
| 0.08046
| 0.513369
| 0.595588
| 0.86631
| 0.89639
| 0.89639
| 0.875668
| 0.875668
| 0.875668
| 0.875668
| 0
| 0.067811
| 0.096199
| 2,578
| 17
| 211
| 151.647059
| 0.574249
| 0
| 0
| 0
| 0
| 0
| 0.723429
| 0.094259
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
8a8ab734aecd28ea8e6592ee54d674444ffa2c99
| 12,043
|
py
|
Python
|
kolibri/logger/migrations/0001_initial.py
|
jonboiser/kolibri
|
8ea2febc1739ac772007aae4084f0226dfb4ed40
|
[
"MIT"
] | null | null | null |
kolibri/logger/migrations/0001_initial.py
|
jonboiser/kolibri
|
8ea2febc1739ac772007aae4084f0226dfb4ed40
|
[
"MIT"
] | 5
|
2016-01-22T18:43:44.000Z
|
2019-07-25T20:34:16.000Z
|
kolibri/logger/migrations/0001_initial.py
|
jonboiser/kolibri
|
8ea2febc1739ac772007aae4084f0226dfb4ed40
|
[
"MIT"
] | 1
|
2019-11-12T14:00:30.000Z
|
2019-11-12T14:00:30.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2017-05-16 22:32
from __future__ import unicode_literals
import django.core.validators
import django.db.models.deletion
import jsonfield.fields
import morango.utils.uuids
from django.db import migrations
from django.db import models
import kolibri.content.models
class Migration(migrations.Migration):
initial = True
dependencies = [
('exams', '__first__'),
('kolibriauth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='AttemptLog',
fields=[
('id', morango.utils.uuids.UUIDField(editable=False, primary_key=True, serialize=False)),
('_morango_dirty_bit', models.BooleanField(default=True, editable=False)),
('_morango_source_id', models.CharField(editable=False, max_length=96)),
('_morango_partition', models.CharField(editable=False, max_length=128)),
('item', models.CharField(max_length=200)),
('start_timestamp', models.DateTimeField()),
('end_timestamp', models.DateTimeField()),
('completion_timestamp', models.DateTimeField(blank=True, null=True)),
('time_spent', models.FloatField(default=0.0, help_text='(in seconds)', validators=[django.core.validators.MinValueValidator(0)])),
('complete', models.BooleanField(default=False)),
('correct', models.FloatField(validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(1)])),
('hinted', models.BooleanField(default=False)),
('answer', jsonfield.fields.JSONField(blank=True, default={}, null=True)),
('simple_answer', models.CharField(blank=True, max_length=200)),
('interaction_history', jsonfield.fields.JSONField(blank=True, default=[])),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityDataset')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='ContentSessionLog',
fields=[
('id', morango.utils.uuids.UUIDField(editable=False, primary_key=True, serialize=False)),
('_morango_dirty_bit', models.BooleanField(default=True, editable=False)),
('_morango_source_id', models.CharField(editable=False, max_length=96)),
('_morango_partition', models.CharField(editable=False, max_length=128)),
('content_id', kolibri.content.models.UUIDField(db_index=True)),
('channel_id', kolibri.content.models.UUIDField()),
('start_timestamp', models.DateTimeField()),
('end_timestamp', models.DateTimeField(blank=True, null=True)),
('time_spent', models.FloatField(default=0.0, help_text='(in seconds)', validators=[django.core.validators.MinValueValidator(0)])),
('progress', models.FloatField(default=0, validators=[django.core.validators.MinValueValidator(0)])),
('kind', models.CharField(max_length=200)),
('extra_fields', jsonfield.fields.JSONField(blank=True, default={})),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityDataset')),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityUser')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='ContentSummaryLog',
fields=[
('id', morango.utils.uuids.UUIDField(editable=False, primary_key=True, serialize=False)),
('_morango_dirty_bit', models.BooleanField(default=True, editable=False)),
('_morango_source_id', models.CharField(editable=False, max_length=96)),
('_morango_partition', models.CharField(editable=False, max_length=128)),
('content_id', kolibri.content.models.UUIDField(db_index=True)),
('channel_id', kolibri.content.models.UUIDField()),
('start_timestamp', models.DateTimeField()),
('end_timestamp', models.DateTimeField(blank=True, null=True)),
('completion_timestamp', models.DateTimeField(blank=True, null=True)),
('time_spent', models.FloatField(default=0.0, help_text='(in seconds)', validators=[django.core.validators.MinValueValidator(0)])),
('progress', models.FloatField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(1.01)])),
('kind', models.CharField(max_length=200)),
('extra_fields', jsonfield.fields.JSONField(blank=True, default={})),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityDataset')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityUser')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='ExamAttemptLog',
fields=[
('id', morango.utils.uuids.UUIDField(editable=False, primary_key=True, serialize=False)),
('_morango_dirty_bit', models.BooleanField(default=True, editable=False)),
('_morango_source_id', models.CharField(editable=False, max_length=96)),
('_morango_partition', models.CharField(editable=False, max_length=128)),
('item', models.CharField(max_length=200)),
('start_timestamp', models.DateTimeField()),
('end_timestamp', models.DateTimeField()),
('completion_timestamp', models.DateTimeField(blank=True, null=True)),
('time_spent', models.FloatField(default=0.0, help_text='(in seconds)', validators=[django.core.validators.MinValueValidator(0)])),
('complete', models.BooleanField(default=False)),
('correct', models.FloatField(validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(1)])),
('hinted', models.BooleanField(default=False)),
('answer', jsonfield.fields.JSONField(blank=True, default={}, null=True)),
('simple_answer', models.CharField(blank=True, max_length=200)),
('interaction_history', jsonfield.fields.JSONField(blank=True, default=[])),
('content_id', kolibri.content.models.UUIDField()),
('channel_id', kolibri.content.models.UUIDField()),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityDataset')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='ExamLog',
fields=[
('id', morango.utils.uuids.UUIDField(editable=False, primary_key=True, serialize=False)),
('_morango_dirty_bit', models.BooleanField(default=True, editable=False)),
('_morango_source_id', models.CharField(editable=False, max_length=96)),
('_morango_partition', models.CharField(editable=False, max_length=128)),
('closed', models.BooleanField(default=False)),
('completion_timestamp', models.DateTimeField(blank=True, null=True)),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityDataset')),
('exam', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='examlogs', to='exams.Exam')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityUser')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='MasteryLog',
fields=[
('id', morango.utils.uuids.UUIDField(editable=False, primary_key=True, serialize=False)),
('_morango_dirty_bit', models.BooleanField(default=True, editable=False)),
('_morango_source_id', models.CharField(editable=False, max_length=96)),
('_morango_partition', models.CharField(editable=False, max_length=128)),
('mastery_criterion', jsonfield.fields.JSONField(default={})),
('start_timestamp', models.DateTimeField()),
('end_timestamp', models.DateTimeField(blank=True, null=True)),
('completion_timestamp', models.DateTimeField(blank=True, null=True)),
('mastery_level', models.IntegerField(validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(10)])),
('complete', models.BooleanField(default=False)),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityDataset')),
('summarylog', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='masterylogs', to='logger.ContentSummaryLog')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityUser')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='UserSessionLog',
fields=[
('id', morango.utils.uuids.UUIDField(editable=False, primary_key=True, serialize=False)),
('_morango_dirty_bit', models.BooleanField(default=True, editable=False)),
('_morango_source_id', models.CharField(editable=False, max_length=96)),
('_morango_partition', models.CharField(editable=False, max_length=128)),
('channels', models.TextField(blank=True)),
('start_timestamp', models.DateTimeField(auto_now_add=True)),
('last_interaction_timestamp', models.DateTimeField(auto_now=True, null=True)),
('pages', models.TextField(blank=True)),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityDataset')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityUser')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='examattemptlog',
name='examlog',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='attemptlogs', to='logger.ExamLog'),
),
migrations.AddField(
model_name='examattemptlog',
name='user',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityUser'),
),
migrations.AddField(
model_name='attemptlog',
name='masterylog',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='attemptlogs', to='logger.MasteryLog'),
),
migrations.AddField(
model_name='attemptlog',
name='sessionlog',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='attemptlogs', to='logger.ContentSessionLog'),
),
migrations.AddField(
model_name='attemptlog',
name='user',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityUser'),
),
]
| 59.618812
| 165
| 0.616873
| 1,141
| 12,043
| 6.364592
| 0.119194
| 0.050124
| 0.038557
| 0.060589
| 0.883779
| 0.862435
| 0.825117
| 0.825117
| 0.818094
| 0.818094
| 0
| 0.010829
| 0.240887
| 12,043
| 201
| 166
| 59.915423
| 0.783527
| 0.005563
| 0
| 0.744792
| 1
| 0
| 0.159359
| 0.035998
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8a92844fe805b3277bd3d5862ad5871e41d6eed8
| 223
|
py
|
Python
|
__init__.py
|
berkott/mean_average_precision
|
9f4d65036de9c5deca7ec0c13c4d4278ecf8e3c3
|
[
"MIT"
] | 136
|
2020-05-30T18:31:09.000Z
|
2022-03-30T09:10:22.000Z
|
__init__.py
|
berkott/mean_average_precision
|
9f4d65036de9c5deca7ec0c13c4d4278ecf8e3c3
|
[
"MIT"
] | 21
|
2020-06-03T10:23:45.000Z
|
2022-03-31T14:37:28.000Z
|
__init__.py
|
berkott/mean_average_precision
|
9f4d65036de9c5deca7ec0c13c4d4278ecf8e3c3
|
[
"MIT"
] | 24
|
2020-05-31T09:21:45.000Z
|
2022-03-23T14:29:42.000Z
|
from .mean_average_precision.metric_builder import MetricBuilder
from .mean_average_precision.mean_average_precision_2d import MeanAveragePrecision2d
from .mean_average_precision.multiprocessing import MetricMultiprocessing
| 74.333333
| 84
| 0.923767
| 25
| 223
| 7.84
| 0.48
| 0.22449
| 0.408163
| 0.367347
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009434
| 0.049327
| 223
| 3
| 85
| 74.333333
| 0.915094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8a981df1620150db2fd6a9506cb769fe97800d4e
| 40,378
|
py
|
Python
|
sdk/python/pulumi_azure/appinsights/web_test.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/appinsights/web_test.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/appinsights/web_test.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['WebTestArgs', 'WebTest']
@pulumi.input_type
class WebTestArgs:
def __init__(__self__, *,
application_insights_id: pulumi.Input[str],
configuration: pulumi.Input[str],
geo_locations: pulumi.Input[Sequence[pulumi.Input[str]]],
kind: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
description: Optional[pulumi.Input[str]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
frequency: Optional[pulumi.Input[int]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
retry_enabled: Optional[pulumi.Input[bool]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
timeout: Optional[pulumi.Input[int]] = None):
"""
The set of arguments for constructing a WebTest resource.
:param pulumi.Input[str] application_insights_id: The ID of the Application Insights component on which the WebTest operates. Changing this forces a new resource to be created.
:param pulumi.Input[str] configuration: An XML configuration specification for a WebTest ([see here for more information](https://docs.microsoft.com/en-us/rest/api/application-insights/webtests/createorupdate/)).
:param pulumi.Input[Sequence[pulumi.Input[str]]] geo_locations: A list of where to physically run the tests from to give global coverage for accessibility of your application.
:param pulumi.Input[str] kind: = (Required) The kind of web test that this web test watches. Choices are `ping` and `multistep`. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Application Insights WebTest. Changing this forces a new resource
:param pulumi.Input[str] description: Purpose/user defined descriptive test for this WebTest.
:param pulumi.Input[bool] enabled: Is the test actively being monitored.
:param pulumi.Input[int] frequency: Interval in seconds between test runs for this WebTest. Valid options are `300`, `600` and `900`. Defaults to `300`.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created. It needs to correlate with location of parent resource (azurerm_application_insights).
:param pulumi.Input[str] name: Specifies the name of the Application Insights WebTest. Changing this forces a
new resource to be created.
:param pulumi.Input[bool] retry_enabled: Allow for retries should this WebTest fail.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[int] timeout: Seconds until this WebTest will timeout and fail. Default is `30`.
"""
pulumi.set(__self__, "application_insights_id", application_insights_id)
pulumi.set(__self__, "configuration", configuration)
pulumi.set(__self__, "geo_locations", geo_locations)
pulumi.set(__self__, "kind", kind)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if description is not None:
pulumi.set(__self__, "description", description)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if frequency is not None:
pulumi.set(__self__, "frequency", frequency)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if retry_enabled is not None:
pulumi.set(__self__, "retry_enabled", retry_enabled)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if timeout is not None:
pulumi.set(__self__, "timeout", timeout)
@property
@pulumi.getter(name="applicationInsightsId")
def application_insights_id(self) -> pulumi.Input[str]:
"""
The ID of the Application Insights component on which the WebTest operates. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "application_insights_id")
@application_insights_id.setter
def application_insights_id(self, value: pulumi.Input[str]):
pulumi.set(self, "application_insights_id", value)
@property
@pulumi.getter
def configuration(self) -> pulumi.Input[str]:
"""
An XML configuration specification for a WebTest ([see here for more information](https://docs.microsoft.com/en-us/rest/api/application-insights/webtests/createorupdate/)).
"""
return pulumi.get(self, "configuration")
@configuration.setter
def configuration(self, value: pulumi.Input[str]):
pulumi.set(self, "configuration", value)
@property
@pulumi.getter(name="geoLocations")
def geo_locations(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
A list of where to physically run the tests from to give global coverage for accessibility of your application.
"""
return pulumi.get(self, "geo_locations")
@geo_locations.setter
def geo_locations(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "geo_locations", value)
@property
@pulumi.getter
def kind(self) -> pulumi.Input[str]:
"""
= (Required) The kind of web test that this web test watches. Choices are `ping` and `multistep`. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: pulumi.Input[str]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group in which to create the Application Insights WebTest. Changing this forces a new resource
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Purpose/user defined descriptive test for this WebTest.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Is the test actively being monitored.
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter
def frequency(self) -> Optional[pulumi.Input[int]]:
"""
Interval in seconds between test runs for this WebTest. Valid options are `300`, `600` and `900`. Defaults to `300`.
"""
return pulumi.get(self, "frequency")
@frequency.setter
def frequency(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "frequency", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created. It needs to correlate with location of parent resource (azurerm_application_insights).
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the Application Insights WebTest. Changing this forces a
new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="retryEnabled")
def retry_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Allow for retries should this WebTest fail.
"""
return pulumi.get(self, "retry_enabled")
@retry_enabled.setter
def retry_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "retry_enabled", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def timeout(self) -> Optional[pulumi.Input[int]]:
"""
Seconds until this WebTest will timeout and fail. Default is `30`.
"""
return pulumi.get(self, "timeout")
@timeout.setter
def timeout(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "timeout", value)
@pulumi.input_type
class _WebTestState:
def __init__(__self__, *,
application_insights_id: Optional[pulumi.Input[str]] = None,
configuration: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
frequency: Optional[pulumi.Input[int]] = None,
geo_locations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
kind: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
retry_enabled: Optional[pulumi.Input[bool]] = None,
synthetic_monitor_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
timeout: Optional[pulumi.Input[int]] = None):
"""
Input properties used for looking up and filtering WebTest resources.
:param pulumi.Input[str] application_insights_id: The ID of the Application Insights component on which the WebTest operates. Changing this forces a new resource to be created.
:param pulumi.Input[str] configuration: An XML configuration specification for a WebTest ([see here for more information](https://docs.microsoft.com/en-us/rest/api/application-insights/webtests/createorupdate/)).
:param pulumi.Input[str] description: Purpose/user defined descriptive test for this WebTest.
:param pulumi.Input[bool] enabled: Is the test actively being monitored.
:param pulumi.Input[int] frequency: Interval in seconds between test runs for this WebTest. Valid options are `300`, `600` and `900`. Defaults to `300`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] geo_locations: A list of where to physically run the tests from to give global coverage for accessibility of your application.
:param pulumi.Input[str] kind: = (Required) The kind of web test that this web test watches. Choices are `ping` and `multistep`. Changing this forces a new resource to be created.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created. It needs to correlate with location of parent resource (azurerm_application_insights).
:param pulumi.Input[str] name: Specifies the name of the Application Insights WebTest. Changing this forces a
new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Application Insights WebTest. Changing this forces a new resource
:param pulumi.Input[bool] retry_enabled: Allow for retries should this WebTest fail.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[int] timeout: Seconds until this WebTest will timeout and fail. Default is `30`.
"""
if application_insights_id is not None:
pulumi.set(__self__, "application_insights_id", application_insights_id)
if configuration is not None:
pulumi.set(__self__, "configuration", configuration)
if description is not None:
pulumi.set(__self__, "description", description)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if frequency is not None:
pulumi.set(__self__, "frequency", frequency)
if geo_locations is not None:
pulumi.set(__self__, "geo_locations", geo_locations)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if retry_enabled is not None:
pulumi.set(__self__, "retry_enabled", retry_enabled)
if synthetic_monitor_id is not None:
pulumi.set(__self__, "synthetic_monitor_id", synthetic_monitor_id)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if timeout is not None:
pulumi.set(__self__, "timeout", timeout)
@property
@pulumi.getter(name="applicationInsightsId")
def application_insights_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the Application Insights component on which the WebTest operates. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "application_insights_id")
@application_insights_id.setter
def application_insights_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "application_insights_id", value)
@property
@pulumi.getter
def configuration(self) -> Optional[pulumi.Input[str]]:
"""
An XML configuration specification for a WebTest ([see here for more information](https://docs.microsoft.com/en-us/rest/api/application-insights/webtests/createorupdate/)).
"""
return pulumi.get(self, "configuration")
@configuration.setter
def configuration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "configuration", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Purpose/user defined descriptive test for this WebTest.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Is the test actively being monitored.
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter
def frequency(self) -> Optional[pulumi.Input[int]]:
"""
Interval in seconds between test runs for this WebTest. Valid options are `300`, `600` and `900`. Defaults to `300`.
"""
return pulumi.get(self, "frequency")
@frequency.setter
def frequency(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "frequency", value)
@property
@pulumi.getter(name="geoLocations")
def geo_locations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of where to physically run the tests from to give global coverage for accessibility of your application.
"""
return pulumi.get(self, "geo_locations")
@geo_locations.setter
def geo_locations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "geo_locations", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
= (Required) The kind of web test that this web test watches. Choices are `ping` and `multistep`. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created. It needs to correlate with location of parent resource (azurerm_application_insights).
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the Application Insights WebTest. Changing this forces a
new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource group in which to create the Application Insights WebTest. Changing this forces a new resource
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="retryEnabled")
def retry_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Allow for retries should this WebTest fail.
"""
return pulumi.get(self, "retry_enabled")
@retry_enabled.setter
def retry_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "retry_enabled", value)
@property
@pulumi.getter(name="syntheticMonitorId")
def synthetic_monitor_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "synthetic_monitor_id")
@synthetic_monitor_id.setter
def synthetic_monitor_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "synthetic_monitor_id", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def timeout(self) -> Optional[pulumi.Input[int]]:
"""
Seconds until this WebTest will timeout and fail. Default is `30`.
"""
return pulumi.get(self, "timeout")
@timeout.setter
def timeout(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "timeout", value)
class WebTest(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
application_insights_id: Optional[pulumi.Input[str]] = None,
configuration: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
frequency: Optional[pulumi.Input[int]] = None,
geo_locations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
kind: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
retry_enabled: Optional[pulumi.Input[bool]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
timeout: Optional[pulumi.Input[int]] = None,
__props__=None):
"""
Manages an Application Insights WebTest.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_insights = azure.appinsights.Insights("exampleInsights",
location="West Europe",
resource_group_name=example_resource_group.name,
application_type="web")
example_web_test = azure.appinsights.WebTest("exampleWebTest",
location=example_insights.location,
resource_group_name=example_resource_group.name,
application_insights_id=example_insights.id,
kind="ping",
frequency=300,
timeout=60,
enabled=True,
geo_locations=[
"us-tx-sn1-azr",
"us-il-ch1-azr",
],
configuration=\"\"\"<WebTest Name="WebTest1" Id="ABD48585-0831-40CB-9069-682EA6BB3583" Enabled="True" CssProjectStructure="" CssIteration="" Timeout="0" WorkItemIds="" xmlns="http://microsoft.com/schemas/VisualStudio/TeamTest/2010" Description="" CredentialUserName="" CredentialPassword="" PreAuthenticate="True" Proxy="default" StopOnError="False" RecordedResultFile="" ResultsLocale="">
<Items>
<Request Method="GET" Guid="a5f10126-e4cd-570d-961c-cea43999a200" Version="1.1" Url="http://microsoft.com" ThinkTime="0" Timeout="300" ParseDependentRequests="True" FollowRedirects="True" RecordResult="True" Cache="False" ResponseTimeGoal="0" Encoding="utf-8" ExpectedHttpStatusCode="200" ExpectedResponseUrl="" ReportingName="" IgnoreHttpStatusCode="False" />
</Items>
</WebTest>
\"\"\")
pulumi.export("webtestId", example_web_test.id)
pulumi.export("webtestsSyntheticId", example_web_test.synthetic_monitor_id)
```
## Import
Application Insights Web Tests can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:appinsights/webTest:WebTest my_test /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Insights/webTests/my_test
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] application_insights_id: The ID of the Application Insights component on which the WebTest operates. Changing this forces a new resource to be created.
:param pulumi.Input[str] configuration: An XML configuration specification for a WebTest ([see here for more information](https://docs.microsoft.com/en-us/rest/api/application-insights/webtests/createorupdate/)).
:param pulumi.Input[str] description: Purpose/user defined descriptive test for this WebTest.
:param pulumi.Input[bool] enabled: Is the test actively being monitored.
:param pulumi.Input[int] frequency: Interval in seconds between test runs for this WebTest. Valid options are `300`, `600` and `900`. Defaults to `300`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] geo_locations: A list of where to physically run the tests from to give global coverage for accessibility of your application.
:param pulumi.Input[str] kind: = (Required) The kind of web test that this web test watches. Choices are `ping` and `multistep`. Changing this forces a new resource to be created.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created. It needs to correlate with location of parent resource (azurerm_application_insights).
:param pulumi.Input[str] name: Specifies the name of the Application Insights WebTest. Changing this forces a
new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Application Insights WebTest. Changing this forces a new resource
:param pulumi.Input[bool] retry_enabled: Allow for retries should this WebTest fail.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[int] timeout: Seconds until this WebTest will timeout and fail. Default is `30`.
"""
...
    # Overload stub: typed signature for constructing a WebTest from a
    # WebTestArgs bag.  The real implementation is the dispatching __init__
    # below; this body is intentionally `...`.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: WebTestArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages an Application Insights WebTest.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_insights = azure.appinsights.Insights("exampleInsights",
            location="West Europe",
            resource_group_name=example_resource_group.name,
            application_type="web")
        example_web_test = azure.appinsights.WebTest("exampleWebTest",
            location=example_insights.location,
            resource_group_name=example_resource_group.name,
            application_insights_id=example_insights.id,
            kind="ping",
            frequency=300,
            timeout=60,
            enabled=True,
            geo_locations=[
                "us-tx-sn1-azr",
                "us-il-ch1-azr",
            ],
            configuration=\"\"\"<WebTest Name="WebTest1" Id="ABD48585-0831-40CB-9069-682EA6BB3583" Enabled="True" CssProjectStructure="" CssIteration="" Timeout="0" WorkItemIds="" xmlns="http://microsoft.com/schemas/VisualStudio/TeamTest/2010" Description="" CredentialUserName="" CredentialPassword="" PreAuthenticate="True" Proxy="default" StopOnError="False" RecordedResultFile="" ResultsLocale="">
          <Items>
            <Request Method="GET" Guid="a5f10126-e4cd-570d-961c-cea43999a200" Version="1.1" Url="http://microsoft.com" ThinkTime="0" Timeout="300" ParseDependentRequests="True" FollowRedirects="True" RecordResult="True" Cache="False" ResponseTimeGoal="0" Encoding="utf-8" ExpectedHttpStatusCode="200" ExpectedResponseUrl="" ReportingName="" IgnoreHttpStatusCode="False" />
          </Items>
        </WebTest>
        \"\"\")
        pulumi.export("webtestId", example_web_test.id)
        pulumi.export("webtestsSyntheticId", example_web_test.synthetic_monitor_id)
        ```

        ## Import

        Application Insights Web Tests can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:appinsights/webTest:WebTest my_test /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Insights/webTests/my_test
        ```

        :param str resource_name: The name of the resource.
        :param WebTestArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher behind the two typed @overload signatures:
        # accepts either a single WebTestArgs object or the individual
        # keyword arguments, then forwards to the shared _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(WebTestArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            # Args-object form: expand the bag's fields into keyword args.
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            # Keyword-argument form: pass everything through unchanged.
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 application_insights_id: Optional[pulumi.Input[str]] = None,
                 configuration: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 frequency: Optional[pulumi.Input[int]] = None,
                 geo_locations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 kind: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 retry_enabled: Optional[pulumi.Input[bool]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 timeout: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads: normalizes
        # resource options, validates required inputs, builds the property
        # bag and registers the resource with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No provider id supplied => creating a new resource.
            # NOTE: `opts.urn` being set means the engine already knows the
            # resource, so required-property checks are skipped in that case.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = WebTestArgs.__new__(WebTestArgs)
            if application_insights_id is None and not opts.urn:
                raise TypeError("Missing required property 'application_insights_id'")
            __props__.__dict__["application_insights_id"] = application_insights_id
            if configuration is None and not opts.urn:
                raise TypeError("Missing required property 'configuration'")
            __props__.__dict__["configuration"] = configuration
            __props__.__dict__["description"] = description
            __props__.__dict__["enabled"] = enabled
            __props__.__dict__["frequency"] = frequency
            if geo_locations is None and not opts.urn:
                raise TypeError("Missing required property 'geo_locations'")
            __props__.__dict__["geo_locations"] = geo_locations
            if kind is None and not opts.urn:
                raise TypeError("Missing required property 'kind'")
            __props__.__dict__["kind"] = kind
            __props__.__dict__["location"] = location
            __props__.__dict__["name"] = name
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["retry_enabled"] = retry_enabled
            __props__.__dict__["tags"] = tags
            __props__.__dict__["timeout"] = timeout
            # Output-only property: populated by the provider, never by callers.
            __props__.__dict__["synthetic_monitor_id"] = None
        super(WebTest, __self__).__init__(
            'azure:appinsights/webTest:WebTest',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            application_insights_id: Optional[pulumi.Input[str]] = None,
            configuration: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            enabled: Optional[pulumi.Input[bool]] = None,
            frequency: Optional[pulumi.Input[int]] = None,
            geo_locations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            kind: Optional[pulumi.Input[str]] = None,
            location: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None,
            retry_enabled: Optional[pulumi.Input[bool]] = None,
            synthetic_monitor_id: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            timeout: Optional[pulumi.Input[int]] = None) -> 'WebTest':
        """
        Get an existing WebTest resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] application_insights_id: The ID of the Application Insights component on which the WebTest operates. Changing this forces a new resource to be created.
        :param pulumi.Input[str] configuration: An XML configuration specification for a WebTest ([see here for more information](https://docs.microsoft.com/en-us/rest/api/application-insights/webtests/createorupdate/)).
        :param pulumi.Input[str] description: Purpose/user defined descriptive test for this WebTest.
        :param pulumi.Input[bool] enabled: Is the test actively being monitored.
        :param pulumi.Input[int] frequency: Interval in seconds between test runs for this WebTest. Valid options are `300`, `600` and `900`. Defaults to `300`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] geo_locations: A list of where to physically run the tests from to give global coverage for accessibility of your application.
        :param pulumi.Input[str] kind: = (Required) The kind of web test that this web test watches. Choices are `ping` and `multistep`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created. It needs to correlate with location of parent resource (azurerm_application_insights).
        :param pulumi.Input[str] name: Specifies the name of the Application Insights WebTest. Changing this forces a
               new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Application Insights WebTest. Changing this forces a new resource
        :param pulumi.Input[bool] retry_enabled: Allow for retries should this WebTest fail.
        :param pulumi.Input[str] synthetic_monitor_id: Provider-assigned identifier of the WebTest (normally an output; presumably only supplied when importing state — TODO confirm).
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[int] timeout: Seconds until this WebTest will timeout and fail. Default is `30`.
        """
        # Attach the provider id to the options so the engine performs a
        # lookup instead of a create.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _WebTestState.__new__(_WebTestState)
        __props__.__dict__["application_insights_id"] = application_insights_id
        __props__.__dict__["configuration"] = configuration
        __props__.__dict__["description"] = description
        __props__.__dict__["enabled"] = enabled
        __props__.__dict__["frequency"] = frequency
        __props__.__dict__["geo_locations"] = geo_locations
        __props__.__dict__["kind"] = kind
        __props__.__dict__["location"] = location
        __props__.__dict__["name"] = name
        __props__.__dict__["resource_group_name"] = resource_group_name
        __props__.__dict__["retry_enabled"] = retry_enabled
        __props__.__dict__["synthetic_monitor_id"] = synthetic_monitor_id
        __props__.__dict__["tags"] = tags
        __props__.__dict__["timeout"] = timeout
        return WebTest(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="applicationInsightsId")
    def application_insights_id(self) -> pulumi.Output[str]:
        """
        The ID of the Application Insights component on which the WebTest operates. Changing this forces a new resource to be created.
        """
        # Output accessor: the value is resolved by the Pulumi engine.
        return pulumi.get(self, "application_insights_id")
    @property
    @pulumi.getter
    def configuration(self) -> pulumi.Output[str]:
        """
        An XML configuration specification for a WebTest ([see here for more information](https://docs.microsoft.com/en-us/rest/api/application-insights/webtests/createorupdate/)).
        """
        # Output accessor: the value is resolved by the Pulumi engine.
        return pulumi.get(self, "configuration")
    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        Purpose/user defined descriptive test for this WebTest.
        """
        # Optional output: may resolve to None when not configured.
        return pulumi.get(self, "description")
    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Is the test actively being monitored.
        """
        # Optional output: may resolve to None when not configured.
        return pulumi.get(self, "enabled")
    @property
    @pulumi.getter
    def frequency(self) -> pulumi.Output[Optional[int]]:
        """
        Interval in seconds between test runs for this WebTest. Valid options are `300`, `600` and `900`. Defaults to `300`.
        """
        # Optional output: may resolve to None when the default applies.
        return pulumi.get(self, "frequency")
    @property
    @pulumi.getter(name="geoLocations")
    def geo_locations(self) -> pulumi.Output[Sequence[str]]:
        """
        A list of where to physically run the tests from to give global coverage for accessibility of your application.
        """
        # Output accessor: the value is resolved by the Pulumi engine.
        return pulumi.get(self, "geo_locations")
    @property
    @pulumi.getter
    def kind(self) -> pulumi.Output[str]:
        """
        = (Required) The kind of web test that this web test watches. Choices are `ping` and `multistep`. Changing this forces a new resource to be created.
        """
        # Output accessor: the value is resolved by the Pulumi engine.
        return pulumi.get(self, "kind")
    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created. It needs to correlate with location of parent resource (azurerm_application_insights).
        """
        # Output accessor: the value is resolved by the Pulumi engine.
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Specifies the name of the Application Insights WebTest. Changing this forces a
        new resource to be created.
        """
        # Output accessor: the value is resolved by the Pulumi engine.
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the resource group in which to create the Application Insights WebTest. Changing this forces a new resource
        """
        # Output accessor: the value is resolved by the Pulumi engine.
        return pulumi.get(self, "resource_group_name")
    @property
    @pulumi.getter(name="retryEnabled")
    def retry_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Allow for retries should this WebTest fail.
        """
        # Optional output: may resolve to None when not configured.
        return pulumi.get(self, "retry_enabled")
    @property
    @pulumi.getter(name="syntheticMonitorId")
    def synthetic_monitor_id(self) -> pulumi.Output[str]:
        """
        Provider-assigned identifier for this WebTest (output only; set by Azure,
        see `_internal_init` where it is initialized to None — exact semantics
        not visible here, presumably the unique monitor id within the
        Application Insights component; TODO confirm against provider docs).
        """
        return pulumi.get(self, "synthetic_monitor_id")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A mapping of tags to assign to the resource.
        """
        # Optional output: may resolve to None when no tags are set.
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def timeout(self) -> pulumi.Output[Optional[int]]:
        """
        Seconds until this WebTest will timeout and fail. Default is `30`.
        """
        # Optional output: may resolve to None when the default applies.
        return pulumi.get(self, "timeout")
| 48.883777
| 401
| 0.660632
| 4,764
| 40,378
| 5.438077
| 0.063602
| 0.086193
| 0.066469
| 0.046706
| 0.936697
| 0.918825
| 0.895048
| 0.878681
| 0.868568
| 0.858725
| 0
| 0.009753
| 0.235673
| 40,378
| 825
| 402
| 48.94303
| 0.829693
| 0.409579
| 0
| 0.776316
| 1
| 0
| 0.091395
| 0.016077
| 0
| 0
| 0
| 0
| 0
| 1
| 0.164474
| false
| 0.002193
| 0.010965
| 0.004386
| 0.274123
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8acba659efd3508a1bcfa422aa128b7dd479987c
| 16,219
|
py
|
Python
|
forecasting/long_term_forecasting.py
|
nareshram256/EnergyManagementSystem
|
2a48ba3b9bf7ff3003c197ee43ea9efbfbe42baa
|
[
"MIT"
] | 9
|
2020-04-24T14:34:16.000Z
|
2022-01-25T07:16:03.000Z
|
forecasting/long_term_forecasting.py
|
casemsee/EnergyManagementSystem
|
2a48ba3b9bf7ff3003c197ee43ea9efbfbe42baa
|
[
"MIT"
] | null | null | null |
forecasting/long_term_forecasting.py
|
casemsee/EnergyManagementSystem
|
2a48ba3b9bf7ff3003c197ee43ea9efbfbe42baa
|
[
"MIT"
] | 7
|
2019-09-19T13:26:02.000Z
|
2021-11-27T09:53:54.000Z
|
# Long-term forecasting for the local energy management system
# These functions can be replaced with another forecasting engine
from modelling.database.database_format import db_long_term_forecasting,half_hourly_history_data
from configuration.configuration_time_line import default_look_ahead_time_step, default_time
import random
def blank_forecasting_result(*args):
    """Build an all-zero long-term forecasting record for one timestamp.

    :param args: ``args[0]`` is the timestamp for the new record.
    :return: a ``db_long_term_forecasting`` row with every forecast field 0.
    """
    target_time = args[0]
    # Every forecastable quantity starts at zero; real values are filled in
    # later by the individual forecasting functions.
    zero_fields = {
        "AC_PD": 0,
        "NAC_PD": 0,
        "DC_PD": 0,
        "NDC_PD": 0,
        "PV_PG": 0,
        "WP_PG": 0,
        "PRICE": 0,
    }
    return db_long_term_forecasting(TIME_STAMP=target_time, **zero_fields)
def long_term_forecasting_pv(*args):
    """Long-term (UC horizon) photovoltaic output forecast.

    :param args: ``(session, target_time)`` — SQLAlchemy session for the
        forecasting database and the first timestamp of the horizon.
    :return: list of forecast PV generation values, one per UC time step.
    """
    session = args[0]
    target_time = args[1]
    steps = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
    step = default_time["Time_step_uc"]
    if session.query(db_long_term_forecasting).filter(
            db_long_term_forecasting.TIME_STAMP == target_time).count() == 0:
        # The forecasting result does not exist: pre-create blank rows.
        for i in range(steps):
            session.add(blank_forecasting_result(target_time + i * step))
        session.commit()
    pv_pg = []
    for i in range(steps):
        # Placeholder forecasting engine: one random value per step.
        # (Removed an unused history-db query the original issued here.)
        pv_pg.append(random.random())
        stamp = target_time + i * step
        row = session.query(db_long_term_forecasting).filter_by(
            TIME_STAMP=stamp).first()
        if row is None:
            # Row missing (partially populated horizon): create it first.
            # Replaces the original bare `except:` around the attribute write.
            session.add(blank_forecasting_result(stamp))
            session.commit()
            row = session.query(db_long_term_forecasting).filter_by(
                TIME_STAMP=stamp).first()
        row.PV_PG = pv_pg[i]
        session.commit()
    return pv_pg
def long_term_forecasting_wp(*args):
    """Long-term (UC horizon) wind power output forecast.

    :param args: ``(session, target_time)``.
    :return: list of forecast wind generation values, one per UC time step.
    """
    session = args[0]
    target_time = args[1]
    steps = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
    step = default_time["Time_step_uc"]
    if session.query(db_long_term_forecasting).filter(
            db_long_term_forecasting.TIME_STAMP == target_time).count() == 0:
        # The forecasting result does not exist: pre-create blank rows.
        for i in range(steps):
            session.add(blank_forecasting_result(target_time + i * step))
        session.commit()
    wp_pg = []
    for i in range(steps):
        # Placeholder forecasting engine: one random value per step.
        wp_pg.append(random.random())
        stamp = target_time + i * step
        row = session.query(db_long_term_forecasting).filter_by(
            TIME_STAMP=stamp).first()
        if row is None:
            # Row missing: create then re-fetch (replaces the bare `except:`).
            session.add(blank_forecasting_result(stamp))
            session.commit()
            row = session.query(db_long_term_forecasting).filter_by(
                TIME_STAMP=stamp).first()
        row.WP_PG = wp_pg[i]
        session.commit()
    return wp_pg
def long_term_forecasting_load_ac(*args):
    """Long-term (UC horizon) forecast for the critical AC load.

    :param args: ``(session, target_time)``.
    :return: list of forecast critical-AC demand values, one per UC step.
    """
    session = args[0]
    target_time = args[1]
    steps = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
    # BUG FIX: the original pre-created blank rows at "Time_step_ed"
    # intervals but queried them back at "Time_step_uc" intervals, so the
    # pre-created rows were never found.  Use the UC step consistently, as
    # the other UC-horizon forecasting functions in this module do.
    step = default_time["Time_step_uc"]
    if session.query(db_long_term_forecasting).filter(
            db_long_term_forecasting.TIME_STAMP == target_time).count() == 0:
        for i in range(steps):
            session.add(blank_forecasting_result(target_time + i * step))
        session.commit()
    ac_pd = []
    for i in range(steps):
        # Placeholder forecasting engine: one random value per step.
        ac_pd.append(random.random())
        stamp = target_time + i * step
        row = session.query(db_long_term_forecasting).filter_by(
            TIME_STAMP=stamp).first()
        if row is None:
            # Row missing: create then re-fetch (replaces the bare `except:`).
            session.add(blank_forecasting_result(stamp))
            session.commit()
            row = session.query(db_long_term_forecasting).filter_by(
                TIME_STAMP=stamp).first()
        row.AC_PD = ac_pd[i]
        session.commit()
    return ac_pd
def long_term_forecasting_load_uac(*args):
    """Long-term (UC horizon) forecast for the non-critical AC load.

    :param args: ``(session, target_time)``.
    :return: list of forecast non-critical-AC demand values, one per UC step.
    """
    session = args[0]
    target_time = args[1]
    steps = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
    step = default_time["Time_step_uc"]
    if session.query(db_long_term_forecasting).filter(
            db_long_term_forecasting.TIME_STAMP == target_time).count() == 0:
        # The forecasting result does not exist: pre-create blank rows.
        for i in range(steps):
            session.add(blank_forecasting_result(target_time + i * step))
        session.commit()
    uac_pd = []
    for i in range(steps):
        # Placeholder forecasting engine: one random value per step.
        uac_pd.append(random.random())
        stamp = target_time + i * step
        row = session.query(db_long_term_forecasting).filter_by(
            TIME_STAMP=stamp).first()
        if row is None:
            # Row missing: create then re-fetch (replaces the bare `except:`).
            session.add(blank_forecasting_result(stamp))
            session.commit()
            row = session.query(db_long_term_forecasting).filter_by(
                TIME_STAMP=stamp).first()
        # BUG FIX: the original wrote `row.UAC_PD`, but the forecasting
        # record (see blank_forecasting_result) uses NAC_PD for non-critical
        # AC load, so the value was only a transient attribute and never
        # persisted.  Write the schema's column instead.
        row.NAC_PD = uac_pd[i]
        session.commit()
    return uac_pd
def long_term_forecasting_load_dc(*args):
    """Long-term (UC horizon) forecast for the critical DC load.

    :param args: ``(session, target_time)``.
    :return: list of forecast critical-DC demand values, one per UC step.
    """
    session = args[0]
    target_time = args[1]
    steps = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
    step = default_time["Time_step_uc"]
    if session.query(db_long_term_forecasting).filter(
            db_long_term_forecasting.TIME_STAMP == target_time).count() == 0:
        # The forecasting result does not exist: pre-create blank rows.
        for i in range(steps):
            session.add(blank_forecasting_result(target_time + i * step))
        session.commit()
    dc_pd = []
    for i in range(steps):
        # Placeholder forecasting engine: one random value per step.
        dc_pd.append(random.random())
        stamp = target_time + i * step
        row = session.query(db_long_term_forecasting).filter_by(
            TIME_STAMP=stamp).first()
        if row is None:
            # Row missing: create then re-fetch (replaces the bare `except:`).
            session.add(blank_forecasting_result(stamp))
            session.commit()
            row = session.query(db_long_term_forecasting).filter_by(
                TIME_STAMP=stamp).first()
        row.DC_PD = dc_pd[i]
        session.commit()
    return dc_pd
def long_term_forecasting_load_udc(*args):
    """Long-term (UC horizon) forecast for the non-critical DC load.

    :param args: ``(session, target_time)``.
    :return: list of forecast non-critical-DC demand values, one per UC step.
    """
    session = args[0]
    target_time = args[1]
    steps = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
    step = default_time["Time_step_uc"]
    if session.query(db_long_term_forecasting).filter(
            db_long_term_forecasting.TIME_STAMP == target_time).count() == 0:
        # The forecasting result does not exist: pre-create blank rows.
        for i in range(steps):
            session.add(blank_forecasting_result(target_time + i * step))
        session.commit()
    udc_pd = []
    for i in range(steps):
        # Placeholder forecasting engine: one random value per step.
        udc_pd.append(random.random())
        stamp = target_time + i * step
        row = session.query(db_long_term_forecasting).filter_by(
            TIME_STAMP=stamp).first()
        if row is None:
            # Row missing: create then re-fetch (replaces the bare `except:`).
            session.add(blank_forecasting_result(stamp))
            session.commit()
            row = session.query(db_long_term_forecasting).filter_by(
                TIME_STAMP=stamp).first()
        # BUG FIX: the original wrote `row.UDC_PD`, but the forecasting
        # record (see blank_forecasting_result) uses NDC_PD for non-critical
        # DC load, so the value was never persisted.  Write the schema's
        # column instead.
        row.NDC_PD = udc_pd[i]
        session.commit()
    return udc_pd
def long_term_forecasting_pv_history(*args):
    """Long-term PV forecast sourced from half-hourly history data.

    :param args: ``(session, session_source, target_time)`` — forecasting-db
        session, history-db session, first timestamp of the horizon.
    :return: list of PV generation values, one per UC time step.
    """
    session = args[0]
    session_source = args[1]
    target_time = args[2]
    steps = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
    step = default_time["Time_step_uc"]
    base_time = default_time["Base_time"]
    if session.query(db_long_term_forecasting).filter(
            db_long_term_forecasting.TIME_STAMP == target_time).count() == 0:
        # The forecasting result does not exist: pre-create blank rows.
        for i in range(steps):
            session.add(blank_forecasting_result(target_time + i * step))
        session.commit()
    pv_pg = []
    for i in range(steps):
        # "Forecast" = historical half-hourly sample aligned to this step.
        row_source = session_source.query(half_hourly_history_data).filter_by(
            TIME_STAMP=int((target_time - base_time) / step) + i).first()
        pv_pg.append(row_source.PV_PG)
        stamp = target_time + i * step
        row = session.query(db_long_term_forecasting).filter_by(
            TIME_STAMP=stamp).first()
        if row is None:
            # Row missing: create then re-fetch (replaces the bare `except:`).
            session.add(blank_forecasting_result(stamp))
            session.commit()
            row = session.query(db_long_term_forecasting).filter_by(
                TIME_STAMP=stamp).first()
        row.PV_PG = pv_pg[i]
        session.commit()
    session_source.close()
    return pv_pg
def long_term_forecasting_wp_history(*args):
    """Long-term wind power forecast sourced from half-hourly history data.

    :param args: ``(session, session_source, target_time)``.
    :return: list of wind generation values, one per UC time step.
    """
    session = args[0]
    session_source = args[1]
    target_time = args[2]
    steps = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
    step = default_time["Time_step_uc"]
    base_time = default_time["Base_time"]
    if session.query(db_long_term_forecasting).filter(
            db_long_term_forecasting.TIME_STAMP == target_time).count() == 0:
        # The forecasting result does not exist: pre-create blank rows.
        for i in range(steps):
            session.add(blank_forecasting_result(target_time + i * step))
        session.commit()
    wp_pg = []
    for i in range(steps):
        # "Forecast" = historical half-hourly sample aligned to this step.
        row_source = session_source.query(half_hourly_history_data).filter_by(
            TIME_STAMP=int((target_time - base_time) / step) + i).first()
        wp_pg.append(row_source.WP_PG)
        stamp = target_time + i * step
        row = session.query(db_long_term_forecasting).filter_by(
            TIME_STAMP=stamp).first()
        if row is None:
            # Row missing: create then re-fetch (replaces the bare `except:`).
            session.add(blank_forecasting_result(stamp))
            session.commit()
            row = session.query(db_long_term_forecasting).filter_by(
                TIME_STAMP=stamp).first()
        row.WP_PG = wp_pg[i]
        session.commit()
    session_source.close()
    return wp_pg
def long_term_forecasting_load_ac_history(*args):
    """Long-term critical-AC-load forecast sourced from history data.

    :param args: ``(session, session_source, target_time)``.
    :return: list of critical-AC demand values, one per UC time step.
    """
    session = args[0]
    session_source = args[1]
    target_time = args[2]
    steps = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
    # BUG FIX: the original pre-created blank rows at "Time_step_ed"
    # intervals but queried them back at "Time_step_uc" intervals, so the
    # pre-created rows were never found.  Use the UC step consistently.
    step = default_time["Time_step_uc"]
    base_time = default_time["Base_time"]
    if session.query(db_long_term_forecasting).filter(
            db_long_term_forecasting.TIME_STAMP == target_time).count() == 0:
        for i in range(steps):
            session.add(blank_forecasting_result(target_time + i * step))
        session.commit()
    ac_pd = []
    for i in range(steps):
        # "Forecast" = historical half-hourly sample aligned to this step.
        row_source = session_source.query(half_hourly_history_data).filter_by(
            TIME_STAMP=int((target_time - base_time) / step) + i).first()
        ac_pd.append(row_source.AC_PD)
        stamp = target_time + i * step
        row = session.query(db_long_term_forecasting).filter_by(
            TIME_STAMP=stamp).first()
        if row is None:
            # Row missing: create then re-fetch (replaces the bare `except:`).
            session.add(blank_forecasting_result(stamp))
            session.commit()
            row = session.query(db_long_term_forecasting).filter_by(
                TIME_STAMP=stamp).first()
        row.AC_PD = ac_pd[i]
        session.commit()
    session_source.close()
    return ac_pd
def long_term_forecasting_load_nac_history(*args):
    """Long-term non-critical-AC-load forecast sourced from history data.

    :param args: ``(session, session_source, target_time)``.
    :return: list of non-critical-AC demand values, one per UC time step.
    """
    session = args[0]
    session_source = args[1]
    target_time = args[2]
    steps = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
    step = default_time["Time_step_uc"]
    base_time = default_time["Base_time"]
    if session.query(db_long_term_forecasting).filter(
            db_long_term_forecasting.TIME_STAMP == target_time).count() == 0:
        # The forecasting result does not exist: pre-create blank rows.
        for i in range(steps):
            session.add(blank_forecasting_result(target_time + i * step))
        session.commit()
    nac_pd = []
    for i in range(steps):
        # "Forecast" = historical half-hourly sample aligned to this step.
        row_source = session_source.query(half_hourly_history_data).filter_by(
            TIME_STAMP=int((target_time - base_time) / step) + i).first()
        nac_pd.append(row_source.NAC_PD)
        stamp = target_time + i * step
        row = session.query(db_long_term_forecasting).filter_by(
            TIME_STAMP=stamp).first()
        if row is None:
            # Row missing: create then re-fetch (replaces the bare `except:`).
            session.add(blank_forecasting_result(stamp))
            session.commit()
            row = session.query(db_long_term_forecasting).filter_by(
                TIME_STAMP=stamp).first()
        # BUG FIX: the original's except-branch wrote `row.UAC_PD` while the
        # try-branch wrote `row.NAC_PD`; NAC_PD is the schema column (see
        # blank_forecasting_result), so write it on every path.
        row.NAC_PD = nac_pd[i]
        session.commit()
    session_source.close()
    return nac_pd
def long_term_forecasting_load_dc_history(*args):
    """Long-term critical-DC-load forecast sourced from history data.

    :param args: ``(session, session_source, target_time)``.
    :return: list of critical-DC demand values, one per UC time step.
    """
    session = args[0]
    session_source = args[1]
    target_time = args[2]
    steps = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
    step = default_time["Time_step_uc"]
    base_time = default_time["Base_time"]
    if session.query(db_long_term_forecasting).filter(
            db_long_term_forecasting.TIME_STAMP == target_time).count() == 0:
        # The forecasting result does not exist: pre-create blank rows.
        for i in range(steps):
            session.add(blank_forecasting_result(target_time + i * step))
        session.commit()
    dc_pd = []
    for i in range(steps):
        # "Forecast" = historical half-hourly sample aligned to this step.
        row_source = session_source.query(half_hourly_history_data).filter_by(
            TIME_STAMP=int((target_time - base_time) / step) + i).first()
        dc_pd.append(row_source.DC_PD)
        stamp = target_time + i * step
        row = session.query(db_long_term_forecasting).filter_by(
            TIME_STAMP=stamp).first()
        if row is None:
            # Row missing: create then re-fetch (replaces the bare `except:`).
            session.add(blank_forecasting_result(stamp))
            session.commit()
            row = session.query(db_long_term_forecasting).filter_by(
                TIME_STAMP=stamp).first()
        row.DC_PD = dc_pd[i]
        session.commit()
    session_source.close()
    return dc_pd
def long_term_forecasting_load_ndc_history(*args):
    """Long-term non-critical-DC-load forecast sourced from history data.

    :param args: ``(session, session_source, target_time)``.
    :return: list of non-critical-DC demand values, one per UC time step.
    """
    session = args[0]
    session_source = args[1]
    target_time = args[2]
    steps = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
    step = default_time["Time_step_uc"]
    base_time = default_time["Base_time"]
    if session.query(db_long_term_forecasting).filter(
            db_long_term_forecasting.TIME_STAMP == target_time).count() == 0:
        # The forecasting result does not exist: pre-create blank rows.
        for i in range(steps):
            session.add(blank_forecasting_result(target_time + i * step))
        session.commit()
    ndc_pd = []
    for i in range(steps):
        # "Forecast" = historical half-hourly sample aligned to this step.
        row_source = session_source.query(half_hourly_history_data).filter_by(
            TIME_STAMP=int((target_time - base_time) / step) + i).first()
        ndc_pd.append(row_source.NDC_PD)
        stamp = target_time + i * step
        row = session.query(db_long_term_forecasting).filter_by(
            TIME_STAMP=stamp).first()
        if row is None:
            # Row missing: create then re-fetch (replaces the bare `except:`).
            session.add(blank_forecasting_result(stamp))
            session.commit()
            row = session.query(db_long_term_forecasting).filter_by(
                TIME_STAMP=stamp).first()
        # BUG FIX: the original's try-branch wrote `row.UDC_PD` while the
        # except-branch wrote `row.NDC_PD`; NDC_PD is the schema column (see
        # blank_forecasting_result), so write it on every path.
        row.NDC_PD = ndc_pd[i]
        session.commit()
    session_source.close()
    return ndc_pd
| 36.447191
| 171
| 0.759295
| 2,545
| 16,219
| 4.442436
| 0.036149
| 0.073589
| 0.107554
| 0.092429
| 0.949584
| 0.939678
| 0.93844
| 0.931895
| 0.894923
| 0.881302
| 0
| 0.003547
| 0.130773
| 16,219
| 445
| 172
| 36.447191
| 0.798411
| 0.098773
| 0
| 0.844118
| 0
| 0
| 0.09499
| 0.046056
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038235
| false
| 0
| 0.008824
| 0
| 0.085294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
76d65a5767f0b904b729f0d529c1c027a7b0bf68
| 1,923
|
py
|
Python
|
passwordstate_password_fact/test_passwordstate_password_fact.py
|
KBerstene/ansible-boxuk-modules-passwordstate
|
51df69aa51af6171645e8311c6edf30bc81dcaf8
|
[
"MIT"
] | 3
|
2021-09-22T10:03:42.000Z
|
2022-03-01T15:53:58.000Z
|
passwordstate_password_fact/test_passwordstate_password_fact.py
|
KBerstene/ansible-boxuk-modules-passwordstate
|
51df69aa51af6171645e8311c6edf30bc81dcaf8
|
[
"MIT"
] | null | null | null |
passwordstate_password_fact/test_passwordstate_password_fact.py
|
KBerstene/ansible-boxuk-modules-passwordstate
|
51df69aa51af6171645e8311c6edf30bc81dcaf8
|
[
"MIT"
] | 1
|
2021-11-23T10:28:24.000Z
|
2021-11-23T10:28:24.000Z
|
""" PasswordState Test """
import unittest
from passwordstate_password_fact import Password
from passwordstate_password_fact import PasswordState
from ddt import ddt, data, unpack
import mock
class PasswordTest(unittest.TestCase):
    """Unit tests for Password.gather_facts against a mocked PasswordState API."""

    # Canonical API payload returned by the mocked requests.get in every test.
    _API_RESPONSE = [{"Password": "foo", "Title": "bar",
                      "UserName": "foobar", "GenericField1": "123",
                      "PasswordID": 999}]

    def _gather_facts(self, mock_get, match_attributes):
        """Wire up the mocked HTTP layer and gather facts for one password.

        :param mock_get: the patched ``requests.get`` mock.
        :param match_attributes: dict selecting the password (either by
            ``id`` or by ``field``/``field_id``).
        :return: the facts dict produced by ``Password.gather_facts``.
        """
        mock_get.return_value = mock.Mock(status_code=200,
                                          json=lambda: self._API_RESPONSE)
        api = PasswordState(mock.Mock(), "http://passwordstate", "abc123xyz")
        password = Password(api, '123', match_attributes)
        return password.gather_facts('fact_name_prefix')

    @mock.patch('requests.get', autospec=True)
    def test_gather_facts_id(self, mock_get):
        """Facts are gathered when the password is selected by id."""
        facts = self._gather_facts(
            mock_get, {'id': '999', 'field': None, 'field_id': None})
        expected = {'fact_name_prefix_password': 'foo',
                    'fact_name_prefix_username': 'foobar'}
        self.assertEqual(expected, facts)

    @mock.patch('requests.get', autospec=True)
    def test_gather_facts_field(self, mock_get):
        """Facts are gathered when the password is selected by a custom field."""
        facts = self._gather_facts(
            mock_get, {'id': None, 'field': 'GenericField1', 'field_id': '123'})
        expected = {'fact_name_prefix_password': 'foo',
                    'fact_name_prefix_username': 'foobar'}
        self.assertEqual(expected, facts)
| 36.980769
| 94
| 0.609464
| 208
| 1,923
| 5.432692
| 0.259615
| 0.058407
| 0.074336
| 0.051327
| 0.815929
| 0.753982
| 0.711504
| 0.711504
| 0.711504
| 0.711504
| 0
| 0.027083
| 0.25117
| 1,923
| 51
| 95
| 37.705882
| 0.757639
| 0.042642
| 0
| 0.722222
| 0
| 0
| 0.223141
| 0.055096
| 0
| 0
| 0
| 0
| 0.055556
| 1
| 0.055556
| false
| 0.472222
| 0.138889
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
0a41286c67bb576e8804adc7392287281fd93099
| 76,119
|
py
|
Python
|
nipyapi/registry/apis/extension_repository_api.py
|
iMajna/nipyapi
|
5480af8fe8c6b470249837835cb1a067abb6678e
|
[
"Apache-2.0"
] | null | null | null |
nipyapi/registry/apis/extension_repository_api.py
|
iMajna/nipyapi
|
5480af8fe8c6b470249837835cb1a067abb6678e
|
[
"Apache-2.0"
] | 1
|
2020-03-16T10:02:46.000Z
|
2020-03-16T13:37:42.000Z
|
nipyapi/registry/apis/extension_repository_api.py
|
iMajna/nipyapi
|
5480af8fe8c6b470249837835cb1a067abb6678e
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Apache NiFi Registry REST API
The REST API provides an interface to a registry with operations for saving, versioning, reading NiFi flows and components.
OpenAPI spec version: 0.7.0
Contact: dev@nifi.apache.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ExtensionRepositoryApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def get_extension_repo_artifacts(self, bucket_name, group_id, **kwargs):
"""
Get extension repo artifacts
Gets the artifacts in the extension repository in the given bucket and group. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_artifacts(bucket_name, group_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str bucket_name: The bucket name (required)
:param str group_id: The group id (required)
:return: list[ExtensionRepoArtifact]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_extension_repo_artifacts_with_http_info(bucket_name, group_id, **kwargs)
else:
(data) = self.get_extension_repo_artifacts_with_http_info(bucket_name, group_id, **kwargs)
return data
def get_extension_repo_artifacts_with_http_info(self, bucket_name, group_id, **kwargs):
"""
Get extension repo artifacts
Gets the artifacts in the extension repository in the given bucket and group. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_artifacts_with_http_info(bucket_name, group_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str bucket_name: The bucket name (required)
:param str group_id: The group id (required)
:return: list[ExtensionRepoArtifact]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bucket_name', 'group_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_extension_repo_artifacts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bucket_name' is set
if ('bucket_name' not in params) or (params['bucket_name'] is None):
raise ValueError("Missing the required parameter `bucket_name` when calling `get_extension_repo_artifacts`")
# verify the required parameter 'group_id' is set
if ('group_id' not in params) or (params['group_id'] is None):
raise ValueError("Missing the required parameter `group_id` when calling `get_extension_repo_artifacts`")
collection_formats = {}
path_params = {}
if 'bucket_name' in params:
path_params['bucketName'] = params['bucket_name']
if 'group_id' in params:
path_params['groupId'] = params['group_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['tokenAuth', 'Authorization']
return self.api_client.call_api('/extension-repository/{bucketName}/{groupId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ExtensionRepoArtifact]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_extension_repo_buckets(self, **kwargs):
"""
Get extension repo buckets
Gets the names of the buckets the current user is authorized for in order to browse the repo by bucket. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_buckets(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: list[ExtensionRepoBucket]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_extension_repo_buckets_with_http_info(**kwargs)
else:
(data) = self.get_extension_repo_buckets_with_http_info(**kwargs)
return data
def get_extension_repo_buckets_with_http_info(self, **kwargs):
"""
Get extension repo buckets
Gets the names of the buckets the current user is authorized for in order to browse the repo by bucket. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_buckets_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: list[ExtensionRepoBucket]
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_extension_repo_buckets" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['tokenAuth', 'Authorization']
return self.api_client.call_api('/extension-repository', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ExtensionRepoBucket]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_extension_repo_groups(self, bucket_name, **kwargs):
"""
Get extension repo groups
Gets the groups in the extension repository in the given bucket. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_groups(bucket_name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str bucket_name: The bucket name (required)
:return: list[ExtensionRepoGroup]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_extension_repo_groups_with_http_info(bucket_name, **kwargs)
else:
(data) = self.get_extension_repo_groups_with_http_info(bucket_name, **kwargs)
return data
def get_extension_repo_groups_with_http_info(self, bucket_name, **kwargs):
"""
Get extension repo groups
Gets the groups in the extension repository in the given bucket. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_groups_with_http_info(bucket_name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str bucket_name: The bucket name (required)
:return: list[ExtensionRepoGroup]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bucket_name']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_extension_repo_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bucket_name' is set
if ('bucket_name' not in params) or (params['bucket_name'] is None):
raise ValueError("Missing the required parameter `bucket_name` when calling `get_extension_repo_groups`")
collection_formats = {}
path_params = {}
if 'bucket_name' in params:
path_params['bucketName'] = params['bucket_name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['tokenAuth', 'Authorization']
return self.api_client.call_api('/extension-repository/{bucketName}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ExtensionRepoGroup]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_extension_repo_version(self, bucket_name, group_id, artifact_id, version, **kwargs):
"""
Get extension repo version
Gets information about the version in the given bucket, group, and artifact. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_version(bucket_name, group_id, artifact_id, version, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str bucket_name: The bucket name (required)
:param str group_id: The group identifier (required)
:param str artifact_id: The artifact identifier (required)
:param str version: The version (required)
:return: ExtensionRepoVersion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_extension_repo_version_with_http_info(bucket_name, group_id, artifact_id, version, **kwargs)
else:
(data) = self.get_extension_repo_version_with_http_info(bucket_name, group_id, artifact_id, version, **kwargs)
return data
def get_extension_repo_version_with_http_info(self, bucket_name, group_id, artifact_id, version, **kwargs):
"""
Get extension repo version
Gets information about the version in the given bucket, group, and artifact. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_version_with_http_info(bucket_name, group_id, artifact_id, version, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str bucket_name: The bucket name (required)
:param str group_id: The group identifier (required)
:param str artifact_id: The artifact identifier (required)
:param str version: The version (required)
:return: ExtensionRepoVersion
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bucket_name', 'group_id', 'artifact_id', 'version']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_extension_repo_version" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bucket_name' is set
if ('bucket_name' not in params) or (params['bucket_name'] is None):
raise ValueError("Missing the required parameter `bucket_name` when calling `get_extension_repo_version`")
# verify the required parameter 'group_id' is set
if ('group_id' not in params) or (params['group_id'] is None):
raise ValueError("Missing the required parameter `group_id` when calling `get_extension_repo_version`")
# verify the required parameter 'artifact_id' is set
if ('artifact_id' not in params) or (params['artifact_id'] is None):
raise ValueError("Missing the required parameter `artifact_id` when calling `get_extension_repo_version`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `get_extension_repo_version`")
collection_formats = {}
path_params = {}
if 'bucket_name' in params:
path_params['bucketName'] = params['bucket_name']
if 'group_id' in params:
path_params['groupId'] = params['group_id']
if 'artifact_id' in params:
path_params['artifactId'] = params['artifact_id']
if 'version' in params:
path_params['version'] = params['version']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['tokenAuth', 'Authorization']
return self.api_client.call_api('/extension-repository/{bucketName}/{groupId}/{artifactId}/{version}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ExtensionRepoVersion',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_extension_repo_version_content(self, bucket_name, group_id, artifact_id, version, **kwargs):
"""
Get extension repo version content
Gets the binary content of the bundle with the given bucket, group, artifact, and version. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_version_content(bucket_name, group_id, artifact_id, version, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str bucket_name: The bucket name (required)
:param str group_id: The group identifier (required)
:param str artifact_id: The artifact identifier (required)
:param str version: The version (required)
:return: list[str]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_extension_repo_version_content_with_http_info(bucket_name, group_id, artifact_id, version, **kwargs)
else:
(data) = self.get_extension_repo_version_content_with_http_info(bucket_name, group_id, artifact_id, version, **kwargs)
return data
def get_extension_repo_version_content_with_http_info(self, bucket_name, group_id, artifact_id, version, **kwargs):
"""
Get extension repo version content
Gets the binary content of the bundle with the given bucket, group, artifact, and version. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_version_content_with_http_info(bucket_name, group_id, artifact_id, version, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str bucket_name: The bucket name (required)
:param str group_id: The group identifier (required)
:param str artifact_id: The artifact identifier (required)
:param str version: The version (required)
:return: list[str]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bucket_name', 'group_id', 'artifact_id', 'version']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_extension_repo_version_content" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bucket_name' is set
if ('bucket_name' not in params) or (params['bucket_name'] is None):
raise ValueError("Missing the required parameter `bucket_name` when calling `get_extension_repo_version_content`")
# verify the required parameter 'group_id' is set
if ('group_id' not in params) or (params['group_id'] is None):
raise ValueError("Missing the required parameter `group_id` when calling `get_extension_repo_version_content`")
# verify the required parameter 'artifact_id' is set
if ('artifact_id' not in params) or (params['artifact_id'] is None):
raise ValueError("Missing the required parameter `artifact_id` when calling `get_extension_repo_version_content`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `get_extension_repo_version_content`")
collection_formats = {}
path_params = {}
if 'bucket_name' in params:
path_params['bucketName'] = params['bucket_name']
if 'group_id' in params:
path_params['groupId'] = params['group_id']
if 'artifact_id' in params:
path_params['artifactId'] = params['artifact_id']
if 'version' in params:
path_params['version'] = params['version']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/octet-stream'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['tokenAuth', 'Authorization']
return self.api_client.call_api('/extension-repository/{bucketName}/{groupId}/{artifactId}/{version}/content', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[str]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_extension_repo_version_extension(self, bucket_name, group_id, artifact_id, version, name, **kwargs):
"""
Get extension repo extension
Gets information about the extension with the given name in the given bucket, group, artifact, and version. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_version_extension(bucket_name, group_id, artifact_id, version, name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str bucket_name: The bucket name (required)
:param str group_id: The group identifier (required)
:param str artifact_id: The artifact identifier (required)
:param str version: The version (required)
:param str name: The fully qualified name of the extension (required)
:return: Extension
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_extension_repo_version_extension_with_http_info(bucket_name, group_id, artifact_id, version, name, **kwargs)
else:
(data) = self.get_extension_repo_version_extension_with_http_info(bucket_name, group_id, artifact_id, version, name, **kwargs)
return data
def get_extension_repo_version_extension_with_http_info(self, bucket_name, group_id, artifact_id, version, name, **kwargs):
"""
Get extension repo extension
Gets information about the extension with the given name in the given bucket, group, artifact, and version. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_version_extension_with_http_info(bucket_name, group_id, artifact_id, version, name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str bucket_name: The bucket name (required)
:param str group_id: The group identifier (required)
:param str artifact_id: The artifact identifier (required)
:param str version: The version (required)
:param str name: The fully qualified name of the extension (required)
:return: Extension
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bucket_name', 'group_id', 'artifact_id', 'version', 'name']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_extension_repo_version_extension" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bucket_name' is set
if ('bucket_name' not in params) or (params['bucket_name'] is None):
raise ValueError("Missing the required parameter `bucket_name` when calling `get_extension_repo_version_extension`")
# verify the required parameter 'group_id' is set
if ('group_id' not in params) or (params['group_id'] is None):
raise ValueError("Missing the required parameter `group_id` when calling `get_extension_repo_version_extension`")
# verify the required parameter 'artifact_id' is set
if ('artifact_id' not in params) or (params['artifact_id'] is None):
raise ValueError("Missing the required parameter `artifact_id` when calling `get_extension_repo_version_extension`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `get_extension_repo_version_extension`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_extension_repo_version_extension`")
collection_formats = {}
path_params = {}
if 'bucket_name' in params:
path_params['bucketName'] = params['bucket_name']
if 'group_id' in params:
path_params['groupId'] = params['group_id']
if 'artifact_id' in params:
path_params['artifactId'] = params['artifact_id']
if 'version' in params:
path_params['version'] = params['version']
if 'name' in params:
path_params['name'] = params['name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['tokenAuth', 'Authorization']
return self.api_client.call_api('/extension-repository/{bucketName}/{groupId}/{artifactId}/{version}/extensions/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Extension',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_extension_repo_version_extension_additional_details_docs(self, bucket_name, group_id, artifact_id, version, name, **kwargs):
"""
Get extension repo extension details
Gets the additional details documentation for the extension with the given name in the given bucket, group, artifact, and version. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_extension_repo_version_extension_additional_details_docs(bucket_name, group_id, artifact_id, version, name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str bucket_name: The bucket name (required)
:param str group_id: The group identifier (required)
:param str artifact_id: The artifact identifier (required)
:param str version: The version (required)
:param str name: The fully qualified name of the extension (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_extension_repo_version_extension_additional_details_docs_with_http_info(bucket_name, group_id, artifact_id, version, name, **kwargs)
else:
(data) = self.get_extension_repo_version_extension_additional_details_docs_with_http_info(bucket_name, group_id, artifact_id, version, name, **kwargs)
return data
def get_extension_repo_version_extension_additional_details_docs_with_http_info(self, bucket_name, group_id, artifact_id, version, name, **kwargs):
    """
    Get extension repo extension details

    Gets the additional details documentation for the extension with the given name in the given bucket, group, artifact, and version. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str bucket_name: The bucket name (required)
    :param str group_id: The group identifier (required)
    :param str artifact_id: The artifact identifier (required)
    :param str version: The version (required)
    :param str name: The fully qualified name of the extension (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Positional arguments, keyed by their python parameter name.
    params = {
        'bucket_name': bucket_name,
        'group_id': group_id,
        'artifact_id': artifact_id,
        'version': version,
        'name': name,
    }
    # Reject unknown keyword arguments, then merge the recognised ones in.
    allowed = set(params) | {'callback', '_return_http_data_only', '_preload_content', '_request_timeout'}
    for key, val in iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_extension_repo_version_extension_additional_details_docs" % key
            )
        params[key] = val
    # Every path segment is mandatory and must not be None.
    for required in ('bucket_name', 'group_id', 'artifact_id', 'version', 'name'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_extension_repo_version_extension_additional_details_docs`" % required)
    # Substitutions for the templated request path.
    path_params = {
        'bucketName': params['bucket_name'],
        'groupId': params['group_id'],
        'artifactId': params['artifact_id'],
        'version': params['version'],
        'name': params['name'],
    }
    header_params = {
        # The additional-details documentation is served as HTML.
        'Accept': self.api_client.select_header_accept(['text/html']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # Authentication setting
    auth_settings = ['tokenAuth', 'Authorization']
    return self.api_client.call_api(
        '/extension-repository/{bucketName}/{groupId}/{artifactId}/{version}/extensions/{name}/docs/additional-details',
        'GET',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='str',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_extension_repo_version_extension_docs(self, bucket_name, group_id, artifact_id, version, name, **kwargs):
    """
    Get extension repo extension docs

    Gets the documentation for the extension with the given name in the given bucket, group, artifact, and version. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str bucket_name: The bucket name (required)
    :param str group_id: The group identifier (required)
    :param str artifact_id: The artifact identifier (required)
    :param str version: The version (required)
    :param str name: The fully qualified name of the extension (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the payload is of interest to callers of this wrapper.
    kwargs['_return_http_data_only'] = True
    # Both the callback (async) and plain (sync) paths make the identical
    # delegated call, so a single unconditional return covers both.
    return self.get_extension_repo_version_extension_docs_with_http_info(
        bucket_name, group_id, artifact_id, version, name, **kwargs)
def get_extension_repo_version_extension_docs_with_http_info(self, bucket_name, group_id, artifact_id, version, name, **kwargs):
    """
    Get extension repo extension docs

    Gets the documentation for the extension with the given name in the given bucket, group, artifact, and version. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str bucket_name: The bucket name (required)
    :param str group_id: The group identifier (required)
    :param str artifact_id: The artifact identifier (required)
    :param str version: The version (required)
    :param str name: The fully qualified name of the extension (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Positional arguments, keyed by their python parameter name.
    params = {
        'bucket_name': bucket_name,
        'group_id': group_id,
        'artifact_id': artifact_id,
        'version': version,
        'name': name,
    }
    # Reject unknown keyword arguments, then merge the recognised ones in.
    allowed = set(params) | {'callback', '_return_http_data_only', '_preload_content', '_request_timeout'}
    for key, val in iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_extension_repo_version_extension_docs" % key
            )
        params[key] = val
    # Every path segment is mandatory and must not be None.
    for required in ('bucket_name', 'group_id', 'artifact_id', 'version', 'name'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_extension_repo_version_extension_docs`" % required)
    # Substitutions for the templated request path.
    path_params = {
        'bucketName': params['bucket_name'],
        'groupId': params['group_id'],
        'artifactId': params['artifact_id'],
        'version': params['version'],
        'name': params['name'],
    }
    header_params = {
        # The extension documentation is served as HTML.
        'Accept': self.api_client.select_header_accept(['text/html']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # Authentication setting
    auth_settings = ['tokenAuth', 'Authorization']
    return self.api_client.call_api(
        '/extension-repository/{bucketName}/{groupId}/{artifactId}/{version}/extensions/{name}/docs',
        'GET',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='str',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_extension_repo_version_extensions(self, bucket_name, group_id, artifact_id, version, **kwargs):
    """
    Get extension repo extensions

    Gets information about the extensions in the given bucket, group, artifact, and version. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str bucket_name: The bucket name (required)
    :param str group_id: The group identifier (required)
    :param str artifact_id: The artifact identifier (required)
    :param str version: The version (required)
    :return: list[ExtensionMetadata]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized payload is returned from this wrapper.
    kwargs['_return_http_data_only'] = True
    # The delegated call is identical whether or not a callback was
    # supplied, so no branching is needed here.
    return self.get_extension_repo_version_extensions_with_http_info(
        bucket_name, group_id, artifact_id, version, **kwargs)
def get_extension_repo_version_extensions_with_http_info(self, bucket_name, group_id, artifact_id, version, **kwargs):
    """
    Get extension repo extensions

    Gets information about the extensions in the given bucket, group, artifact, and version. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str bucket_name: The bucket name (required)
    :param str group_id: The group identifier (required)
    :param str artifact_id: The artifact identifier (required)
    :param str version: The version (required)
    :return: list[ExtensionMetadata]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Positional arguments, keyed by their python parameter name.
    params = {
        'bucket_name': bucket_name,
        'group_id': group_id,
        'artifact_id': artifact_id,
        'version': version,
    }
    # Reject unknown keyword arguments, then merge the recognised ones in.
    allowed = set(params) | {'callback', '_return_http_data_only', '_preload_content', '_request_timeout'}
    for key, val in iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_extension_repo_version_extensions" % key
            )
        params[key] = val
    # Every path segment is mandatory and must not be None.
    for required in ('bucket_name', 'group_id', 'artifact_id', 'version'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_extension_repo_version_extensions`" % required)
    # Substitutions for the templated request path.
    path_params = {
        'bucketName': params['bucket_name'],
        'groupId': params['group_id'],
        'artifactId': params['artifact_id'],
        'version': params['version'],
    }
    header_params = {
        # Extension metadata comes back as JSON.
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # Authentication setting
    auth_settings = ['tokenAuth', 'Authorization']
    return self.api_client.call_api(
        '/extension-repository/{bucketName}/{groupId}/{artifactId}/{version}/extensions',
        'GET',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[ExtensionMetadata]',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_extension_repo_version_sha256(self, bucket_name, group_id, artifact_id, version, **kwargs):
    """
    Get extension repo version checksum

    Gets the hex representation of the SHA-256 digest for the binary content of the bundle with the given bucket, group, artifact, and version. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str bucket_name: The bucket name (required)
    :param str group_id: The group identifier (required)
    :param str artifact_id: The artifact identifier (required)
    :param str version: The version (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the checksum string itself is wanted here.
    kwargs['_return_http_data_only'] = True
    # Async/sync handling lives in the *_with_http_info helper; delegate
    # unconditionally (a thread is returned when a callback is present).
    return self.get_extension_repo_version_sha256_with_http_info(
        bucket_name, group_id, artifact_id, version, **kwargs)
def get_extension_repo_version_sha256_with_http_info(self, bucket_name, group_id, artifact_id, version, **kwargs):
    """
    Get extension repo version checksum

    Gets the hex representation of the SHA-256 digest for the binary content of the bundle with the given bucket, group, artifact, and version. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str bucket_name: The bucket name (required)
    :param str group_id: The group identifier (required)
    :param str artifact_id: The artifact identifier (required)
    :param str version: The version (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Positional arguments, keyed by their python parameter name.
    params = {
        'bucket_name': bucket_name,
        'group_id': group_id,
        'artifact_id': artifact_id,
        'version': version,
    }
    # Reject unknown keyword arguments, then merge the recognised ones in.
    allowed = set(params) | {'callback', '_return_http_data_only', '_preload_content', '_request_timeout'}
    for key, val in iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_extension_repo_version_sha256" % key
            )
        params[key] = val
    # Every path segment is mandatory and must not be None.
    for required in ('bucket_name', 'group_id', 'artifact_id', 'version'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_extension_repo_version_sha256`" % required)
    # Substitutions for the templated request path.
    path_params = {
        'bucketName': params['bucket_name'],
        'groupId': params['group_id'],
        'artifactId': params['artifact_id'],
        'version': params['version'],
    }
    header_params = {
        # The checksum is served as a plain-text hex string.
        'Accept': self.api_client.select_header_accept(['text/plain']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # Authentication setting
    auth_settings = ['tokenAuth', 'Authorization']
    return self.api_client.call_api(
        '/extension-repository/{bucketName}/{groupId}/{artifactId}/{version}/sha256',
        'GET',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='str',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_extension_repo_versions(self, bucket_name, group_id, artifact_id, **kwargs):
    """
    Get extension repo versions

    Gets the versions in the extension repository for the given bucket, group, and artifact. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str bucket_name: The bucket name (required)
    :param str group_id: The group identifier (required)
    :param str artifact_id: The artifact identifier (required)
    :return: list[ExtensionRepoVersionSummary]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip the HTTP envelope; callers want only the version summaries.
    kwargs['_return_http_data_only'] = True
    # The same delegated call serves both the callback and the blocking
    # form, so no branch is required.
    return self.get_extension_repo_versions_with_http_info(
        bucket_name, group_id, artifact_id, **kwargs)
def get_extension_repo_versions_with_http_info(self, bucket_name, group_id, artifact_id, **kwargs):
    """
    Get extension repo versions

    Gets the versions in the extension repository for the given bucket, group, and artifact. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str bucket_name: The bucket name (required)
    :param str group_id: The group identifier (required)
    :param str artifact_id: The artifact identifier (required)
    :return: list[ExtensionRepoVersionSummary]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Positional arguments, keyed by their python parameter name.
    params = {
        'bucket_name': bucket_name,
        'group_id': group_id,
        'artifact_id': artifact_id,
    }
    # Reject unknown keyword arguments, then merge the recognised ones in.
    allowed = set(params) | {'callback', '_return_http_data_only', '_preload_content', '_request_timeout'}
    for key, val in iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_extension_repo_versions" % key
            )
        params[key] = val
    # Every path segment is mandatory and must not be None.
    for required in ('bucket_name', 'group_id', 'artifact_id'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_extension_repo_versions`" % required)
    # Substitutions for the templated request path.
    path_params = {
        'bucketName': params['bucket_name'],
        'groupId': params['group_id'],
        'artifactId': params['artifact_id'],
    }
    header_params = {
        # Version summaries come back as JSON.
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # Authentication setting
    auth_settings = ['tokenAuth', 'Authorization']
    return self.api_client.call_api(
        '/extension-repository/{bucketName}/{groupId}/{artifactId}',
        'GET',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[ExtensionRepoVersionSummary]',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_global_extension_repo_version_sha256(self, group_id, artifact_id, version, **kwargs):
    """
    Get global extension repo version checksum

    Gets the hex representation of the SHA-256 digest for the binary content with the given bucket, group, artifact, and version. Since the same group-artifact-version can exist in multiple buckets, this will return the checksum of the first one returned. This will be consistent since the checksum must be the same when existing in multiple buckets. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str group_id: The group identifier (required)
    :param str artifact_id: The artifact identifier (required)
    :param str version: The version (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the checksum string is returned from this wrapper.
    kwargs['_return_http_data_only'] = True
    # Delegate unconditionally: the helper returns the request thread when
    # a callback is supplied and the payload otherwise.
    return self.get_global_extension_repo_version_sha256_with_http_info(
        group_id, artifact_id, version, **kwargs)
def get_global_extension_repo_version_sha256_with_http_info(self, group_id, artifact_id, version, **kwargs):
    """
    Get global extension repo version checksum

    Gets the hex representation of the SHA-256 digest for the binary content with the given bucket, group, artifact, and version. Since the same group-artifact-version can exist in multiple buckets, this will return the checksum of the first one returned. This will be consistent since the checksum must be the same when existing in multiple buckets. NOTE: This endpoint is subject to change as NiFi Registry and its REST API evolve.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str group_id: The group identifier (required)
    :param str artifact_id: The artifact identifier (required)
    :param str version: The version (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Positional arguments, keyed by their python parameter name.
    params = {
        'group_id': group_id,
        'artifact_id': artifact_id,
        'version': version,
    }
    # Reject unknown keyword arguments, then merge the recognised ones in.
    allowed = set(params) | {'callback', '_return_http_data_only', '_preload_content', '_request_timeout'}
    for key, val in iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_global_extension_repo_version_sha256" % key
            )
        params[key] = val
    # Every path segment is mandatory and must not be None.
    for required in ('group_id', 'artifact_id', 'version'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_global_extension_repo_version_sha256`" % required)
    # Substitutions for the templated request path (no bucket here: this
    # is the bucket-agnostic, "global" form of the endpoint).
    path_params = {
        'groupId': params['group_id'],
        'artifactId': params['artifact_id'],
        'version': params['version'],
    }
    header_params = {
        # The checksum is served as a plain-text hex string.
        'Accept': self.api_client.select_header_accept(['text/plain']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # Authentication setting
    auth_settings = ['tokenAuth', 'Authorization']
    return self.api_client.call_api(
        '/extension-repository/{groupId}/{artifactId}/{version}/sha256',
        'GET',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='str',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 50.44334
| 439
| 0.614801
| 8,388
| 76,119
| 5.340963
| 0.027778
| 0.037946
| 0.048571
| 0.044152
| 0.981763
| 0.979219
| 0.977946
| 0.972009
| 0.968237
| 0.963728
| 0
| 0.001648
| 0.306573
| 76,119
| 1,508
| 440
| 50.47679
| 0.84711
| 0.351699
| 0
| 0.796543
| 0
| 0.00133
| 0.233352
| 0.081767
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033245
| false
| 0
| 0.009309
| 0
| 0.091755
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a4872ff4cf1eb659c12fdad7332233f5427b062
| 4,163
|
py
|
Python
|
tests/visitors/test_signals.py
|
webstar-commit/django-boilerplate
|
3ac57bf990826d6a90386643452d05a7313e301e
|
[
"MIT"
] | 128
|
2020-05-10T13:17:52.000Z
|
2022-03-31T23:54:50.000Z
|
tests/visitors/test_signals.py
|
webstar-commit/django-boilerplate
|
3ac57bf990826d6a90386643452d05a7313e301e
|
[
"MIT"
] | 477
|
2020-05-07T16:00:05.000Z
|
2022-03-29T15:42:20.000Z
|
tests/visitors/test_signals.py
|
webstar-commit/django-boilerplate
|
3ac57bf990826d6a90386643452d05a7313e301e
|
[
"MIT"
] | 14
|
2020-05-10T14:21:40.000Z
|
2021-11-30T23:25:50.000Z
|
from parameterized import parameterized
from django_codemod.visitors import SignalDisconnectWeakTransformer
from tests.visitors.base import BaseVisitorTest
# Tests for the codemod that drops the deprecated `weak` keyword from
# `Signal.disconnect()` calls. NOTE(review): class body continues past this
# chunk; only the header and shared fixtures are documented here.
class TestSignalDisconnectWeakTransformer(BaseVisitorTest):
# Transformer under test, consumed by BaseVisitorTest.assertCodemod.
transformer = SignalDisconnectWeakTransformer
# Signal names each test is parameterized over — presumably the built-in
# model signals from django.db.models.signals (verify against Django docs).
DJANGO_SIGNAL_NAMES = [
"pre_init",
"post_init",
"pre_save",
"post_save",
"pre_delete",
"post_delete",
"m2m_changed",
"pre_migrate",
"post_migrate",
]
@parameterized.expand(DJANGO_SIGNAL_NAMES)
def test_noop(self, signal_name):
    """A disconnect call without `weak` is left untouched."""
    code = f"""
from django.db.models.signals import {signal_name}
{signal_name}.disconnect(
receiver=some_handler,
sender=MyModel,
dispatch_uid='something-unique',
)
"""
    self.assertCodemod(code, code)
@parameterized.expand(DJANGO_SIGNAL_NAMES)
def test_noop_import_star(self, signal_name):
    """A star import hides the signal's origin, so nothing is rewritten."""
    snippet = f"""
from django.db.models.signals import *
{signal_name}.disconnect(receiver=some_handler, sender=MyModel, weak=True)
"""
    self.assertCodemod(snippet, snippet)
@parameterized.expand(DJANGO_SIGNAL_NAMES)
def test_with_kwargs(self, signal_name):
before = f"""
from django.db.models.signals import {signal_name}
{signal_name}.disconnect(receiver=some_handler, sender=MyModel, weak=True)
"""
after = f"""
from django.db.models.signals import {signal_name}
{signal_name}.disconnect(receiver=some_handler, sender=MyModel)
"""
self.assertCodemod(before, after)
@parameterized.expand(DJANGO_SIGNAL_NAMES)
def test_with_kwargs_dispatch_uid(self, signal_name):
before = f"""
from django.db.models.signals import {signal_name}
{signal_name}.disconnect(
receiver=some_handler,
sender=MyModel,
weak=True,
dispatch_uid='my-unique-id',
)
"""
after = f"""
from django.db.models.signals import {signal_name}
{signal_name}.disconnect(
receiver=some_handler,
sender=MyModel,
dispatch_uid='my-unique-id',
)
"""
self.assertCodemod(before, after)
@parameterized.expand(DJANGO_SIGNAL_NAMES)
def test_imported_with_alias(self, signal_name):
before = f"""
from django.db.models.signals import {signal_name} as dj_{signal_name}
dj_{signal_name}.disconnect(receiver=some_handler, weak=True)
"""
after = f"""
from django.db.models.signals import {signal_name} as dj_{signal_name}
dj_{signal_name}.disconnect(receiver=some_handler)
"""
self.assertCodemod(before, after)
def test_multiple_signal_disconnected_single_import(self):
before = """
from django.db.models.signals import pre_save, post_save
pre_save.disconnect(receiver=some_handler, weak=True)
post_save.disconnect(receiver=some_handler, weak=True)
"""
after = """
from django.db.models.signals import pre_save, post_save
pre_save.disconnect(receiver=some_handler)
post_save.disconnect(receiver=some_handler)
"""
self.assertCodemod(before, after)
def test_multiple_signal_disconnected_separate_imports(self):
before = """
from django.db.models.signals import pre_save
from django.db.models.signals import post_save
pre_save.disconnect(receiver=some_handler, weak=True)
post_save.disconnect(receiver=some_handler, weak=True)
"""
after = """
from django.db.models.signals import pre_save
from django.db.models.signals import post_save
pre_save.disconnect(receiver=some_handler)
post_save.disconnect(receiver=some_handler)
"""
self.assertCodemod(before, after)
| 30.166667
| 86
| 0.620466
| 433
| 4,163
| 5.720554
| 0.143187
| 0.088817
| 0.142107
| 0.187323
| 0.840129
| 0.820347
| 0.820347
| 0.820347
| 0.799758
| 0.799758
| 0
| 0.000337
| 0.286332
| 4,163
| 137
| 87
| 30.386861
| 0.833389
| 0
| 0
| 0.686275
| 0
| 0
| 0.606053
| 0.275282
| 0
| 0
| 0
| 0
| 0.068627
| 1
| 0.068627
| false
| 0
| 0.205882
| 0
| 0.303922
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0a5187d25f1f5a6097d2fcba71e9cdfd45696bd5
| 10,121
|
py
|
Python
|
criarcargas.py
|
antoniocarlosdiniz8/criarcargas
|
613e5f49764ae4a6512e245ee24bc9d4aa9f937f
|
[
"MIT"
] | null | null | null |
criarcargas.py
|
antoniocarlosdiniz8/criarcargas
|
613e5f49764ae4a6512e245ee24bc9d4aa9f937f
|
[
"MIT"
] | null | null | null |
criarcargas.py
|
antoniocarlosdiniz8/criarcargas
|
613e5f49764ae4a6512e245ee24bc9d4aa9f937f
|
[
"MIT"
] | null | null | null |
"""GUI automation script that logs into the ERP and creates 20 daily
"carga" (load) records.

The original script repeated the same key sequence 20 times, varying only
the carga number and the number of TAB/left/right presses used to pick the
status field.  The sequences are now data-driven; the emitted pyautogui
calls are identical to the original, in the same order.

NOTE(review): all screen coordinates (39,225 / 738,372 / 840,447 / 115,99)
are resolution- and layout-dependent — confirm on the target machine.
"""
import pyautogui as rb
from time import sleep

# One entry per carga, in creation order:
# (carga number, TAB presses, 'left' presses, 'right' presses,
#  extra pause in seconds before saving with F2)
_CARGAS = [
    ('001', 0, 0, 0, 0),    # EXPORTADOS TODOS
    ('002', 5, 2, 0, 0),    # EM SEPARACAO AGORA
    ('003', 5, 4, 0, 0),    # CONFERIDO
    ('004', 5, 5, 0, 0),    # RET
    ('005', 5, 0, 1, 0.5),  # EXP DEPOIS
    ('006', 5, 0, 4, 0),    # ROTA 01 MANHA
    ('007', 5, 0, 6, 0),    # ROTA 02 MANHA
    ('008', 5, 0, 8, 0),    # ROTA 03 MANHA
    ('009', 5, 0, 10, 0),   # ROTA 04 MANHA
    ('010', 5, 0, 12, 0),   # ROTA 05 MANHA
    ('011', 5, 0, 14, 0),   # ROTA 06 MANHA
    ('012', 5, 0, 16, 0),   # ROTA 07 MANHA
    ('013', 5, 1, 0, 0),    # EMISSAO DE NF'S
    ('014', 5, 0, 5, 0),    # ROTA 01 TARDE
    ('015', 5, 0, 7, 0),    # ROTA 02 TARDE
    ('016', 5, 0, 9, 0),    # ROTA 03 TARDE
    ('017', 5, 0, 11, 0),   # ROTA 04 TARDE
    ('018', 5, 0, 13, 0),   # ROTA 05 TARDE
    ('019', 5, 0, 15, 0),   # ROTA 06 TARDE
    ('020', 5, 0, 17, 0),   # ROTA 07 TARDE
]


def _digitar_data(dia, mes):
    """Type the day and month into the date field, then TAB onward."""
    rb.write(dia)
    rb.press('right')
    rb.write(mes)
    rb.press('TAB')


def _criar_carga(dia, mes, numero, tabs, esquerdas, direitas, pausa):
    """Create one carga record: F6 (new), date, number, status, F2 (save)."""
    rb.press('F6')
    _digitar_data(dia, mes)
    rb.write(numero)
    # Navigate to the status field, then pick the status with arrow keys.
    for _ in range(tabs):
        rb.press('TAB')
    for _ in range(esquerdas):
        rb.press('left')
    for _ in range(direitas):
        rb.press('right')
    if pausa:
        sleep(pausa)
    rb.press('F2')


# --- collect credentials and the target date -------------------------------
rb.hotkey('winleft', 'd')
u = rb.prompt('DIGITE SEU USUÁRIO!')
s = rb.password('DIGITE SUA SENHA!')
d = rb.prompt('DIGITE A DATA DA CARGA!')
m = rb.prompt('DIGITE O MÊS!')

# --- open the application from the desktop and log in ----------------------
sleep(2)
rb.doubleClick(39, 225)
sleep(2)
rb.click(738, 372)
sleep(2)
rb.write(u)
sleep(1)
rb.press('TAB')
sleep(1)
rb.write(s)
sleep(1)
rb.press('enter')
sleep(1)

# --- navigate to the "cargas" screen ---------------------------------------
rb.click(840, 447)
sleep(1)
rb.click(115, 99)
sleep(1)
rb.write('cargas')
sleep(1)
rb.press('enter')
sleep(1)
rb.press('capslock')
sleep(1)
rb.write('cargas')
sleep(1)
rb.press('enter')
sleep(1)

# --- fill in the screen header date, then create every carga ---------------
_digitar_data(d, m)
rb.write(d)
rb.press('enter')

for _numero, _tabs, _esq, _dir, _pausa in _CARGAS:
    _criar_carga(d, m, _numero, _tabs, _esq, _dir, _pausa)
| 15.838811
| 60
| 0.600731
| 1,773
| 10,121
| 3.42978
| 0.059786
| 0.394836
| 0.249959
| 0.386778
| 0.90939
| 0.90939
| 0.90939
| 0.906759
| 0.898536
| 0.889985
| 0
| 0.041382
| 0.075981
| 10,121
| 639
| 61
| 15.838811
| 0.608747
| 0.251457
| 0
| 0.919725
| 0
| 0
| 0.203932
| 0
| 0
| 0
| 0
| 0.001565
| 0
| 1
| 0
| false
| 0.002294
| 0.004587
| 0
| 0.004587
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0a6f0aad88afcfd41998f44f011770cfd94d87dc
| 21,726
|
py
|
Python
|
tb_rest_client/api/api_pe/blob_entity_controller_api.py
|
maksonlee/python_tb_rest_client
|
a6cd17ef4de31f68c3226b7a9835292fbac4b1fa
|
[
"Apache-2.0"
] | 1
|
2021-07-19T10:09:04.000Z
|
2021-07-19T10:09:04.000Z
|
tb_rest_client/api/api_pe/blob_entity_controller_api.py
|
moravcik94/python_tb_rest_client
|
985361890cdf4ccce93d2b24905ad9003c8dfcaa
|
[
"Apache-2.0"
] | null | null | null |
tb_rest_client/api/api_pe/blob_entity_controller_api.py
|
moravcik94/python_tb_rest_client
|
985361890cdf4ccce93d2b24905ad9003c8dfcaa
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# Copyright 2020. ThingsBoard
# #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# #
# http://www.apache.org/licenses/LICENSE-2.0
# #
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class BlobEntityControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_blob_entity_using_delete(self, blob_entity_id, **kwargs): # noqa: E501
"""deleteBlobEntity # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.delete_blob_entity_using_delete(blob_entity_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str blob_entity_id: blobEntityId (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_blob_entity_using_delete_with_http_info(blob_entity_id, **kwargs) # noqa: E501
else:
(data) = self.delete_blob_entity_using_delete_with_http_info(blob_entity_id, **kwargs) # noqa: E501
return data
def delete_blob_entity_using_delete_with_http_info(self, blob_entity_id, **kwargs): # noqa: E501
"""deleteBlobEntity # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.delete_blob_entity_using_delete_with_http_info(blob_entity_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str blob_entity_id: blobEntityId (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['blob_entity_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'blob_entity_id' is set
if ('blob_entity_id' not in params or
params['blob_entity_id'] is None):
raise ValueError("Missing the required parameter `blob_entity_id` when calling `delete_blob_entity_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'blob_entity_id' in params:
path_params['blobEntityId'] = params['blob_entity_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/blobEntity/{blobEntityId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def download_blob_entity_using_get(self, blob_entity_id, **kwargs): # noqa: E501
"""downloadBlobEntity # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.download_blob_entity_using_get(blob_entity_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str blob_entity_id: blobEntityId (required)
:return: Resource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.download_blob_entity_using_get_with_http_info(blob_entity_id, **kwargs) # noqa: E501
else:
(data) = self.download_blob_entity_using_get_with_http_info(blob_entity_id, **kwargs) # noqa: E501
return data
def download_blob_entity_using_get_with_http_info(self, blob_entity_id, **kwargs): # noqa: E501
"""downloadBlobEntity # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.download_blob_entity_using_get_with_http_info(blob_entity_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str blob_entity_id: blobEntityId (required)
:return: Resource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['blob_entity_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'blob_entity_id' is set
if ('blob_entity_id' not in params or
params['blob_entity_id'] is None):
raise ValueError("Missing the required parameter `blob_entity_id` when calling `download_blob_entity_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'blob_entity_id' in params:
path_params['blobEntityId'] = params['blob_entity_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/blobEntity/{blobEntityId}/download', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Resource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_blob_entities_by_ids_using_get(self, blob_entity_ids, **kwargs): # noqa: E501
"""getBlobEntitiesByIds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_blob_entities_by_ids_using_get(blob_entity_ids, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str blob_entity_ids: blobEntityIds (required)
:return: list[BlobEntityInfo]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_blob_entities_by_ids_using_get_with_http_info(blob_entity_ids, **kwargs) # noqa: E501
else:
(data) = self.get_blob_entities_by_ids_using_get_with_http_info(blob_entity_ids, **kwargs) # noqa: E501
return data
def get_blob_entities_by_ids_using_get_with_http_info(self, blob_entity_ids, **kwargs): # noqa: E501
"""getBlobEntitiesByIds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_blob_entities_by_ids_using_get_with_http_info(blob_entity_ids, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str blob_entity_ids: blobEntityIds (required)
:return: list[BlobEntityInfo]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['blob_entity_ids'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'blob_entity_ids' is set
if ('blob_entity_ids' not in params or
params['blob_entity_ids'] is None):
raise ValueError("Missing the required parameter `blob_entity_ids` when calling `get_blob_entities_by_ids_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'blob_entity_ids' in params:
query_params.append(('blobEntityIds', params['blob_entity_ids'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/blobEntities{?blobEntityIds}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[BlobEntityInfo]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_blob_entities_using_get(self, page_size, page, **kwargs): # noqa: E501
"""getBlobEntities # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_blob_entities_using_get(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_size: pageSize (required)
:param int page: page (required)
:param str type: type
:param str text_search: textSearch
:param str sort_property: sortProperty
:param str sort_order: sortOrder
:param int start_time: startTime
:param int end_time: endTime
:return: PageDataBlobEntityWithCustomerInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_blob_entities_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501
else:
(data) = self.get_blob_entities_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501
return data
def get_blob_entities_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501
"""getBlobEntities # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_blob_entities_using_get_with_http_info(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_size: pageSize (required)
:param int page: page (required)
:param str type: type
:param str text_search: textSearch
:param str sort_property: sortProperty
:param str sort_order: sortOrder
:param int start_time: startTime
:param int end_time: endTime
:return: PageDataBlobEntityWithCustomerInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order', 'start_time', 'end_time'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'page_size' is set
if ('page_size' not in params or
params['page_size'] is None):
raise ValueError("Missing the required parameter `page_size` when calling `get_blob_entities_using_get`") # noqa: E501
# verify the required parameter 'page' is set
if ('page' not in params or
params['page'] is None):
raise ValueError("Missing the required parameter `page` when calling `get_blob_entities_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'page_size' in params:
query_params.append(('pageSize', params['page_size'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'type' in params:
query_params.append(('type', params['type'])) # noqa: E501
if 'text_search' in params:
query_params.append(('textSearch', params['text_search'])) # noqa: E501
if 'sort_property' in params:
query_params.append(('sortProperty', params['sort_property'])) # noqa: E501
if 'sort_order' in params:
query_params.append(('sortOrder', params['sort_order'])) # noqa: E501
if 'start_time' in params:
query_params.append(('startTime', params['start_time'])) # noqa: E501
if 'end_time' in params:
query_params.append(('endTime', params['end_time'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/blobEntities{?pageSize,page,type,textSearch,sortProperty,sortOrder,startTime,endTime}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageDataBlobEntityWithCustomerInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_blob_entity_info_by_id_using_get(self, blob_entity_id, **kwargs): # noqa: E501
"""getBlobEntityInfoById # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_blob_entity_info_by_id_using_get(blob_entity_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str blob_entity_id: blobEntityId (required)
:return: BlobEntityWithCustomerInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_blob_entity_info_by_id_using_get_with_http_info(blob_entity_id, **kwargs) # noqa: E501
else:
(data) = self.get_blob_entity_info_by_id_using_get_with_http_info(blob_entity_id, **kwargs) # noqa: E501
return data
def get_blob_entity_info_by_id_using_get_with_http_info(self, blob_entity_id, **kwargs):  # noqa: E501
    """getBlobEntityInfoById  # noqa: E501

    Perform the actual HTTP call for getBlobEntityInfoById.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api_pe.get_blob_entity_info_by_id_using_get_with_http_info(blob_entity_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str blob_entity_id: blobEntityId (required)
    :return: BlobEntityWithCustomerInfo
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameter names this endpoint understands (generated code pattern).
    all_params = ['blob_entity_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: the locals() trick below captures the current local variables as
    # the working parameter dict; the exact local variable names are therefore
    # part of the method's behavior and must not be renamed.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'blob_entity_id' is set
    if ('blob_entity_id' not in params or
            params['blob_entity_id'] is None):
        raise ValueError("Missing the required parameter `blob_entity_id` when calling `get_blob_entity_info_by_id_using_get`")  # noqa: E501

    collection_formats = {}

    # blobEntityId is substituted into the URL path template below
    path_params = {}
    if 'blob_entity_id' in params:
        path_params['blobEntityId'] = params['blob_entity_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/blobEntity/info/{blobEntityId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='BlobEntityWithCustomerInfo',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 40.761726
| 145
| 0.634493
| 2,572
| 21,726
| 5.041602
| 0.082815
| 0.062466
| 0.041644
| 0.027763
| 0.890645
| 0.868281
| 0.853783
| 0.850004
| 0.842369
| 0.822395
| 0
| 0.015727
| 0.274188
| 21,726
| 532
| 146
| 40.838346
| 0.806583
| 0.335128
| 0
| 0.738007
| 0
| 0.00369
| 0.19603
| 0.060974
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04059
| false
| 0
| 0.01476
| 0
| 0.114391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a9f8c226c1214007b807cf0728a345814b94f90
| 33,576
|
py
|
Python
|
src/pymor/operators/cg.py
|
sdrave/pymor
|
3fa4adf0fcc8e34eacc6c2dc9d5439046cf8fc0a
|
[
"Unlicense"
] | null | null | null |
src/pymor/operators/cg.py
|
sdrave/pymor
|
3fa4adf0fcc8e34eacc6c2dc9d5439046cf8fc0a
|
[
"Unlicense"
] | null | null | null |
src/pymor/operators/cg.py
|
sdrave/pymor
|
3fa4adf0fcc8e34eacc6c2dc9d5439046cf8fc0a
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
# This file is part of the pyMOR project (http://www.pymor.org).
# Copyright Holders: Rene Milk, Stephan Rave, Felix Schindler
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
#
# Contributors: Michael Schaefer <michael.schaefer@uni-muenster.de>
# lucas-ca <lucascamp@web.de>
""" This module provides some operators for continuous finite element discretizations."""
from __future__ import absolute_import, division, print_function
import numpy as np
from scipy.sparse import coo_matrix, csc_matrix
from pymor.functions.interfaces import FunctionInterface
from pymor.grids.referenceelements import triangle, line, square
from pymor.operators.numpy import NumpyMatrixBasedOperator
from pymor.vectorarrays.numpy import NumpyVectorSpace
class L2ProductFunctionalP1(NumpyMatrixBasedOperator):
    """|Functional| representing the scalar product with an L2-|Function| for linear finite elements.

    Boundary treatment can be performed by providing `boundary_info` and `dirichlet_data`,
    in which case the DOFs corresponding to Dirichlet boundaries are set to the values
    provided by `dirichlet_data`. Neumann boundaries are handled by providing a
    `neumann_data` function, Robin boundaries by providing a `robin_data` tuple.

    The current implementation works in one and two dimensions, but can be trivially
    extended to arbitrary dimensions.

    Parameters
    ----------
    grid
        |Grid| for which to assemble the functional.
    function
        The |Function| with which to take the scalar product.
    boundary_info
        |BoundaryInfo| determining the Dirichlet and Neumann boundaries or `None`.
        If `None`, no boundary treatment is performed.
    dirichlet_data
        |Function| providing the Dirichlet boundary values. If `None`,
        constant-zero boundary is assumed.
    neumann_data
        |Function| providing the Neumann boundary values. If `None`,
        constant-zero is assumed.
    robin_data
        Tuple of two |Functions| providing the Robin parameter and boundary values, see `RobinBoundaryOperator`.
        If `None`, constant-zero for both functions is assumed.
    order
        Order of the Gauss quadrature to use for numerical integration.
    name
        The name of the functional.
    """

    # The functional is assembled into a dense 1 x n row vector.
    sparse = False
    range = NumpyVectorSpace(1)

    def __init__(self, grid, function, boundary_info=None, dirichlet_data=None, neumann_data=None, robin_data=None,
                 order=2, name=None):
        # P1 elements require a simplicial grid (intervals or triangles).
        assert grid.reference_element(0) in {line, triangle}
        # Only scalar-valued functions can be paired with scalar shape functions.
        assert function.shape_range == tuple()
        self.source = NumpyVectorSpace(grid.size(grid.dim))
        self.grid = grid
        self.boundary_info = boundary_info
        self.function = function
        self.dirichlet_data = dirichlet_data
        self.neumann_data = neumann_data
        self.robin_data = robin_data
        self.order = order
        self.name = name
        # NOTE(review): robin_data is not registered here, so parametric Robin
        # functions would not contribute to the parameter type — confirm intended.
        self.build_parameter_type(inherits=(function, dirichlet_data, neumann_data))

    def _assemble(self, mu=None):
        g = self.grid
        bi = self.boundary_info

        # evaluate function at all quadrature points -> shape = (g.size(0), number of quadrature points)
        F = self.function(g.quadrature_points(0, order=self.order), mu=mu)

        # evaluate the shape functions at the quadrature points on the reference
        # element -> shape = (number of shape functions, number of quadrature points)
        q, w = g.reference_element.quadrature(order=self.order)
        if g.dim == 1:
            SF = np.array((1 - q[..., 0], q[..., 0]))
        elif g.dim == 2:
            SF = np.array(((1 - np.sum(q, axis=-1)),
                           q[..., 0],
                           q[..., 1]))
        else:
            raise NotImplementedError

        # integrate the products of the function with the shape functions on each element
        # -> shape = (g.size(0), number of shape functions)
        SF_INTS = np.einsum('ei,pi,e,i->ep', F, SF, g.integration_elements(0), w).ravel()

        # map local DOFs to global DOFs
        # FIXME This implementation is horrible, find a better way!
        # (coo_matrix sums duplicate entries, which performs the scatter-add of
        # the element contributions into the global vector.)
        SF_I = g.subentities(0, g.dim).ravel()
        I = np.array(coo_matrix((SF_INTS, (np.zeros_like(SF_I), SF_I)), shape=(1, g.size(g.dim))).todense()).ravel()

        # neumann boundary treatment
        if bi is not None and bi.has_neumann and self.neumann_data is not None:
            NI = bi.neumann_boundaries(1)
            if g.dim == 1:
                # in 1D boundary "faces" are single points, no quadrature needed
                I[NI] -= self.neumann_data(g.centers(1)[NI])
            else:
                F = -self.neumann_data(g.quadrature_points(1, order=self.order)[NI], mu=mu)
                q, w = line.quadrature(order=self.order)
                SF = np.squeeze(np.array([1 - q, q]))
                SF_INTS = np.einsum('ei,pi,e,i->ep', F, SF, g.integration_elements(1)[NI], w).ravel()
                SF_I = g.subentities(1, 2)[NI].ravel()
                I += np.array(coo_matrix((SF_INTS, (np.zeros_like(SF_I), SF_I)), shape=(1, g.size(g.dim)))
                              .todense()).ravel()

        # robin boundary treatment
        if bi is not None and bi.has_robin and self.robin_data is not None:
            RI = bi.robin_boundaries(1)
            if g.dim == 1:
                xref = g.centers(1)[RI]
                I[RI] += (self.robin_data[0](xref) * self.robin_data[1](xref))
            else:
                xref = g.quadrature_points(1, order=self.order)[RI]
                F = (self.robin_data[0](xref, mu=mu) * self.robin_data[1](xref, mu=mu))
                q, w = line.quadrature(order=self.order)
                SF = np.squeeze(np.array([1 - q, q]))
                SF_INTS = np.einsum('ei,pi,e,i->ep', F, SF, g.integration_elements(1)[RI], w).ravel()
                SF_I = g.subentities(1, 2)[RI].ravel()
                I += np.array(coo_matrix((SF_INTS, (np.zeros_like(SF_I), SF_I)), shape=(1, g.size(g.dim)))
                              .todense()).ravel()

        # dirichlet boundary treatment: overwrite boundary DOFs with the
        # prescribed values (or zero) after all integral contributions
        if bi is not None and bi.has_dirichlet:
            DI = bi.dirichlet_boundaries(g.dim)
            if self.dirichlet_data is not None:
                I[DI] = self.dirichlet_data(g.centers(g.dim)[DI], mu=mu)
            else:
                I[DI] = 0

        return I.reshape((1, -1))
class L2ProductFunctionalQ1(NumpyMatrixBasedOperator):
    """|Functional| representing the scalar product with an L2-|Function| for bilinear finite elements.

    Boundary treatment can be performed by providing `boundary_info` and `dirichlet_data`,
    in which case the DOFs corresponding to Dirichlet boundaries are set to the values
    provided by `dirichlet_data`. Neumann boundaries are handled by providing a
    `neumann_data` function, Robin boundaries by providing a `robin_data` tuple.

    The current implementation works in two dimensions, but can be trivially
    extended to arbitrary dimensions.

    Parameters
    ----------
    grid
        |Grid| for which to assemble the functional.
    function
        The |Function| with which to take the scalar product.
    boundary_info
        |BoundaryInfo| determining the Dirichlet boundaries or `None`.
        If `None`, no boundary treatment is performed.
    dirichlet_data
        |Function| providing the Dirichlet boundary values. If `None`,
        constant-zero boundary is assumed.
    neumann_data
        |Function| providing the Neumann boundary values. If `None`,
        constant-zero is assumed.
    robin_data
        Tuple of two |Functions| providing the Robin parameter and boundary values, see `RobinBoundaryOperator`.
        If `None`, constant-zero for both functions is assumed.
    order
        Order of the Gauss quadrature to use for numerical integration.
    name
        The name of the functional.
    """

    # The functional is assembled into a dense 1 x n row vector.
    sparse = False
    range = NumpyVectorSpace(1)

    def __init__(self, grid, function, boundary_info=None, dirichlet_data=None, neumann_data=None, robin_data=None,
                 order=2, name=None):
        # Q1 elements require a quadrilateral grid.
        assert grid.reference_element(0) in {square}
        assert function.shape_range == tuple()
        self.source = NumpyVectorSpace(grid.size(grid.dim))
        self.grid = grid
        self.boundary_info = boundary_info
        self.function = function
        self.dirichlet_data = dirichlet_data
        self.neumann_data = neumann_data
        self.robin_data = robin_data
        self.order = order
        self.name = name
        # NOTE(review): unlike L2ProductFunctionalP1, neumann_data and robin_data
        # are not registered via build_parameter_type here, so parametric
        # Neumann/Robin data would not appear in the parameter type — confirm.
        self.build_parameter_type(inherits=(function, dirichlet_data))

    def _assemble(self, mu=None):
        g = self.grid
        bi = self.boundary_info

        # evaluate function at all quadrature points -> shape = (g.size(0), number of quadrature points)
        F = self.function(g.quadrature_points(0, order=self.order), mu=mu)

        # evaluate the shape functions at the quadrature points on the reference
        # element -> shape = (number of shape functions, number of quadrature points)
        q, w = g.reference_element.quadrature(order=self.order)
        if g.dim == 2:
            # the four bilinear shape functions on the reference square
            SF = np.array(((1 - q[..., 0]) * (1 - q[..., 1]),
                           (1 - q[..., 1]) * (q[..., 0]),
                           (q[..., 0]) * (q[..., 1]),
                           (q[..., 1]) * (1 - q[..., 0])))
        else:
            raise NotImplementedError

        # integrate the products of the function with the shape functions on each element
        # -> shape = (g.size(0), number of shape functions)
        SF_INTS = np.einsum('ei,pi,e,i->ep', F, SF, g.integration_elements(0), w).ravel()

        # map local DOFs to global DOFs
        # FIXME This implementation is horrible, find a better way!
        # (coo_matrix sums duplicate entries, performing the global scatter-add.)
        SF_I = g.subentities(0, g.dim).ravel()
        I = np.array(coo_matrix((SF_INTS, (np.zeros_like(SF_I), SF_I)), shape=(1, g.size(g.dim))).todense()).ravel()

        # neumann boundary treatment
        if bi is not None and bi.has_neumann and self.neumann_data is not None:
            NI = bi.neumann_boundaries(1)
            F = -self.neumann_data(g.quadrature_points(1, order=self.order)[NI], mu=mu)
            q, w = line.quadrature(order=self.order)
            SF = np.squeeze(np.array([1 - q, q]))
            SF_INTS = np.einsum('ei,pi,e,i->ep', F, SF, g.integration_elements(1)[NI], w).ravel()
            SF_I = g.subentities(1, 2)[NI].ravel()
            I += np.array(coo_matrix((SF_INTS, (np.zeros_like(SF_I), SF_I)), shape=(1, g.size(g.dim)))
                          .todense()).ravel()

        # robin boundary treatment
        if bi is not None and bi.has_robin and self.robin_data is not None:
            RI = bi.robin_boundaries(1)
            xref = g.quadrature_points(1, order=self.order)[RI]
            F = self.robin_data[0](xref, mu=mu) * self.robin_data[1](xref, mu=mu)
            q, w = line.quadrature(order=self.order)
            SF = np.squeeze(np.array([1 - q, q]))
            SF_INTS = np.einsum('ei,pi,e,i->ep', F, SF, g.integration_elements(1)[RI], w).ravel()
            SF_I = g.subentities(1, 2)[RI].ravel()
            I += np.array(coo_matrix((SF_INTS, (np.zeros_like(SF_I), SF_I)), shape=(1, g.size(g.dim)))
                          .todense()).ravel()

        # dirichlet boundary treatment: overwrite boundary DOFs last
        if bi is not None and bi.has_dirichlet:
            DI = bi.dirichlet_boundaries(g.dim)
            if self.dirichlet_data is not None:
                I[DI] = self.dirichlet_data(g.centers(g.dim)[DI], mu=mu)
            else:
                I[DI] = 0

        return I.reshape((1, -1))
class L2ProductP1(NumpyMatrixBasedOperator):
    """|Operator| representing the L2-product between linear finite element functions.

    To evaluate the product use the :meth:`~pymor.operators.interfaces.OperatorInterface.apply2`
    method.

    The current implementation works in one and two dimensions, but can be trivially
    extended to arbitrary dimensions.

    Parameters
    ----------
    grid
        The |Grid| for which to assemble the product.
    boundary_info
        |BoundaryInfo| for the treatment of Dirichlet boundary conditions.
    dirichlet_clear_rows
        If `True`, set the rows of the system matrix corresponding to Dirichlet boundary
        DOFs to zero. (Useful when used as mass matrix in time-stepping schemes.)
    dirichlet_clear_columns
        If `True`, set columns of the system matrix corresponding to Dirichlet boundary
        DOFs to zero (to obtain a symmetric matrix).
    dirichlet_clear_diag
        If `True`, also set diagonal entries corresponding to Dirichlet boundary DOFs to
        zero (e.g. for affine decomposition). Otherwise, if either `dirichlet_clear_rows` or
        `dirichlet_clear_columns` is `True`, the diagonal entries are set to one.
    name
        The name of the product.
    """

    sparse = True

    def __init__(self, grid, boundary_info, dirichlet_clear_rows=True, dirichlet_clear_columns=False,
                 dirichlet_clear_diag=False, name=None):
        # NOTE(review): other operators in this module use grid.reference_element(0);
        # here reference_element is accessed as an attribute — confirm both forms
        # are supported by the grid interface.
        assert grid.reference_element in (line, triangle)
        self.source = self.range = NumpyVectorSpace(grid.size(grid.dim))
        self.grid = grid
        self.boundary_info = boundary_info
        self.dirichlet_clear_rows = dirichlet_clear_rows
        self.dirichlet_clear_columns = dirichlet_clear_columns
        self.dirichlet_clear_diag = dirichlet_clear_diag
        self.name = name

    def _assemble(self, mu=None):
        g = self.grid
        bi = self.boundary_info

        # our shape functions
        if g.dim == 2:
            SF = [lambda X: 1 - X[..., 0] - X[..., 1],
                  lambda X: X[..., 0],
                  lambda X: X[..., 1]]
            q, w = triangle.quadrature(order=2)
        elif g.dim == 1:
            SF = [lambda X: 1 - X[..., 0],
                  lambda X: X[..., 0]]
            q, w = line.quadrature(order=2)
        else:
            raise NotImplementedError

        # evaluate the shape functions on the quadrature points
        SFQ = np.array(tuple(f(q) for f in SF))

        self.logger.info('Integrate the products of the shape functions on each element')
        # -> shape = (g.size(0), number of shape functions ** 2)
        SF_INTS = np.einsum('iq,jq,q,e->eij', SFQ, SFQ, w, g.integration_elements(0)).ravel()
        del SFQ

        self.logger.info('Determine global dofs ...')
        # global (row, column) index for every local matrix entry, in the same
        # order as the raveled 'eij' entries above
        SF_I0 = np.repeat(g.subentities(0, g.dim), g.dim + 1, axis=1).ravel()
        SF_I1 = np.tile(g.subentities(0, g.dim), [1, g.dim + 1]).ravel()

        self.logger.info('Boundary treatment ...')
        if bi.has_dirichlet:
            if self.dirichlet_clear_rows:
                SF_INTS = np.where(bi.dirichlet_mask(g.dim)[SF_I0], 0, SF_INTS)
            if self.dirichlet_clear_columns:
                SF_INTS = np.where(bi.dirichlet_mask(g.dim)[SF_I1], 0, SF_INTS)
            # re-insert unit diagonal entries for the cleared Dirichlet DOFs
            if not self.dirichlet_clear_diag and (self.dirichlet_clear_rows or self.dirichlet_clear_columns):
                SF_INTS = np.hstack((SF_INTS, np.ones(bi.dirichlet_boundaries(g.dim).size)))
                SF_I0 = np.hstack((SF_I0, bi.dirichlet_boundaries(g.dim)))
                SF_I1 = np.hstack((SF_I1, bi.dirichlet_boundaries(g.dim)))

        self.logger.info('Assemble system matrix ...')
        # coo_matrix sums duplicate (row, col) pairs: the global scatter-add
        A = coo_matrix((SF_INTS, (SF_I0, SF_I1)), shape=(g.size(g.dim), g.size(g.dim)))
        del SF_INTS, SF_I0, SF_I1
        A = csc_matrix(A).copy()  # See DiffusionOperatorP1 for why copy() is necessary
        return A
class L2ProductQ1(NumpyMatrixBasedOperator):
    """|Operator| representing the L2-product between bilinear finite element functions.

    To evaluate the product use the :meth:`~pymor.operators.interfaces.OperatorInterface.apply2`
    method.

    The current implementation works in two dimensions, but can be trivially
    extended to arbitrary dimensions.

    Parameters
    ----------
    grid
        The |Grid| for which to assemble the product.
    boundary_info
        |BoundaryInfo| for the treatment of Dirichlet boundary conditions.
    dirichlet_clear_rows
        If `True`, set the rows of the system matrix corresponding to Dirichlet boundary
        DOFs to zero. (Useful when used as mass matrix in time-stepping schemes.)
    dirichlet_clear_columns
        If `True`, set columns of the system matrix corresponding to Dirichlet boundary
        DOFs to zero (to obtain a symmetric matrix).
    dirichlet_clear_diag
        If `True`, also set diagonal entries corresponding to Dirichlet boundary DOFs to
        zero (e.g. for affine decomposition). Otherwise, if either `dirichlet_clear_rows` or
        `dirichlet_clear_columns` is `True`, the diagonal entries are set to one.
    name
        The name of the product.
    """

    sparse = True

    def __init__(self, grid, boundary_info, dirichlet_clear_rows=True, dirichlet_clear_columns=False,
                 dirichlet_clear_diag=False, name=None):
        assert grid.reference_element in {square}
        self.source = self.range = NumpyVectorSpace(grid.size(grid.dim))
        self.grid = grid
        self.boundary_info = boundary_info
        self.dirichlet_clear_rows = dirichlet_clear_rows
        self.dirichlet_clear_columns = dirichlet_clear_columns
        self.dirichlet_clear_diag = dirichlet_clear_diag
        self.name = name

    def _assemble(self, mu=None):
        g = self.grid
        bi = self.boundary_info

        # our shape functions (four bilinear functions on the reference square)
        if g.dim == 2:
            SF = [lambda X: (1 - X[..., 0]) * (1 - X[..., 1]),
                  lambda X: (1 - X[..., 1]) * (X[..., 0]),
                  lambda X: (X[..., 0]) * (X[..., 1]),
                  lambda X: (1 - X[..., 0]) * (X[..., 1])]
        else:
            raise NotImplementedError
        q, w = square.quadrature(order=2)

        # evaluate the shape functions on the quadrature points
        SFQ = np.array(tuple(f(q) for f in SF))

        self.logger.info('Integrate the products of the shape functions on each element')
        # -> shape = (g.size(0), number of shape functions ** 2)
        SF_INTS = np.einsum('iq,jq,q,e->eij', SFQ, SFQ, w, g.integration_elements(0)).ravel()
        del SFQ

        self.logger.info('Determine global dofs ...')
        # global (row, column) index for every local matrix entry; 4 = number
        # of shape functions per square element
        SF_I0 = np.repeat(g.subentities(0, g.dim), 4, axis=1).ravel()
        SF_I1 = np.tile(g.subentities(0, g.dim), [1, 4]).ravel()

        self.logger.info('Boundary treatment ...')
        if bi.has_dirichlet:
            if self.dirichlet_clear_rows:
                SF_INTS = np.where(bi.dirichlet_mask(g.dim)[SF_I0], 0, SF_INTS)
            if self.dirichlet_clear_columns:
                SF_INTS = np.where(bi.dirichlet_mask(g.dim)[SF_I1], 0, SF_INTS)
            # re-insert unit diagonal entries for the cleared Dirichlet DOFs
            if not self.dirichlet_clear_diag and (self.dirichlet_clear_rows or self.dirichlet_clear_columns):
                SF_INTS = np.hstack((SF_INTS, np.ones(bi.dirichlet_boundaries(g.dim).size)))
                SF_I0 = np.hstack((SF_I0, bi.dirichlet_boundaries(g.dim)))
                SF_I1 = np.hstack((SF_I1, bi.dirichlet_boundaries(g.dim)))

        self.logger.info('Assemble system matrix ...')
        # coo_matrix sums duplicate (row, col) pairs: the global scatter-add
        A = coo_matrix((SF_INTS, (SF_I0, SF_I1)), shape=(g.size(g.dim), g.size(g.dim)))
        del SF_INTS, SF_I0, SF_I1
        A = csc_matrix(A).copy()  # See DiffusionOperatorP1 for why copy() is necessary
        return A
class DiffusionOperatorP1(NumpyMatrixBasedOperator):
    """Diffusion |Operator| for linear finite elements.

    The operator is of the form ::

        (Lu)(x) = c ∇ ⋅ [ d(x) ∇ u(x) ]

    The function `d` can be scalar- or matrix-valued.

    The current implementation works in one and two dimensions, but can be trivially
    extended to arbitrary dimensions.

    Parameters
    ----------
    grid
        The |Grid| for which to assemble the operator.
    boundary_info
        |BoundaryInfo| for the treatment of Dirichlet boundary conditions.
    diffusion_function
        The |Function| `d(x)` with ``shape_range == tuple()`` or
        ``shape_range = (grid.dim_outer, grid.dim_outer)``. If `None`, constant one is
        assumed.
    diffusion_constant
        The constant `c`. If `None`, `c` is set to one.
    dirichlet_clear_columns
        If `True`, set columns of the system matrix corresponding to Dirichlet boundary
        DOFs to zero to obtain a symmetric system matrix. Otherwise, only the rows will
        be set to zero.
    dirichlet_clear_diag
        If `True`, also set diagonal entries corresponding to Dirichlet boundary DOFs to
        zero (e.g. for affine decomposition). Otherwise they are set to one.
    name
        Name of the operator.
    """

    sparse = True

    def __init__(self, grid, boundary_info, diffusion_function=None, diffusion_constant=None,
                 dirichlet_clear_columns=False, dirichlet_clear_diag=False, name=None):
        assert grid.reference_element(0) in {triangle, line}, 'A simplicial grid is expected!'
        assert diffusion_function is None \
            or (isinstance(diffusion_function, FunctionInterface) and
                diffusion_function.dim_domain == grid.dim_outer and
                diffusion_function.shape_range == tuple() or diffusion_function.shape_range == (grid.dim_outer,) * 2)
        self.source = self.range = NumpyVectorSpace(grid.size(grid.dim))
        self.grid = grid
        self.boundary_info = boundary_info
        self.diffusion_constant = diffusion_constant
        self.diffusion_function = diffusion_function
        self.dirichlet_clear_columns = dirichlet_clear_columns
        self.dirichlet_clear_diag = dirichlet_clear_diag
        self.name = name
        # only a parametric diffusion function contributes to the parameter type
        if diffusion_function is not None:
            self.build_parameter_type(inherits=(diffusion_function,))

    def _assemble(self, mu=None):
        g = self.grid
        bi = self.boundary_info

        # gradients of the P1 shape functions on the reference element
        # (constant per element, so no quadrature loop is needed)
        if g.dim == 2:
            SF_GRAD = np.array(([-1., -1.],
                                [1., 0.],
                                [0., 1.]))
        elif g.dim == 1:
            SF_GRAD = np.array(([-1.],
                                [1., ]))
        else:
            raise NotImplementedError

        # fix: corrected misspelled log messages ('Calulate', 'beween')
        self.logger.info('Calculate gradients of shape functions transformed by reference map ...')
        SF_GRADS = np.einsum('eij,pj->epi', g.jacobian_inverse_transposed(0), SF_GRAD)

        self.logger.info('Calculate all local scalar products between gradients ...')
        if self.diffusion_function is not None and self.diffusion_function.shape_range == tuple():
            # scalar diffusion: one factor per element
            D = self.diffusion_function(self.grid.centers(0), mu=mu)
            SF_INTS = np.einsum('epi,eqi,e,e->epq', SF_GRADS, SF_GRADS, g.volumes(0), D).ravel()
            del D
        elif self.diffusion_function is not None:
            # matrix-valued diffusion: contract the gradients with the tensor
            D = self.diffusion_function(self.grid.centers(0), mu=mu)
            SF_INTS = np.einsum('epi,eqj,e,eij->epq', SF_GRADS, SF_GRADS, g.volumes(0), D).ravel()
            del D
        else:
            SF_INTS = np.einsum('epi,eqi,e->epq', SF_GRADS, SF_GRADS, g.volumes(0)).ravel()
        del SF_GRADS

        if self.diffusion_constant is not None:
            SF_INTS *= self.diffusion_constant

        self.logger.info('Determine global dofs ...')
        # global (row, column) index for every local matrix entry
        SF_I0 = np.repeat(g.subentities(0, g.dim), g.dim + 1, axis=1).ravel()
        SF_I1 = np.tile(g.subentities(0, g.dim), [1, g.dim + 1]).ravel()

        self.logger.info('Boundary treatment ...')
        if bi.has_dirichlet:
            SF_INTS = np.where(bi.dirichlet_mask(g.dim)[SF_I0], 0, SF_INTS)
            if self.dirichlet_clear_columns:
                SF_INTS = np.where(bi.dirichlet_mask(g.dim)[SF_I1], 0, SF_INTS)
            # re-insert unit diagonal entries for the cleared Dirichlet DOFs
            if not self.dirichlet_clear_diag:
                SF_INTS = np.hstack((SF_INTS, np.ones(bi.dirichlet_boundaries(g.dim).size)))
                SF_I0 = np.hstack((SF_I0, bi.dirichlet_boundaries(g.dim)))
                SF_I1 = np.hstack((SF_I1, bi.dirichlet_boundaries(g.dim)))

        self.logger.info('Assemble system matrix ...')
        A = coo_matrix((SF_INTS, (SF_I0, SF_I1)), shape=(g.size(g.dim), g.size(g.dim)))
        del SF_INTS, SF_I0, SF_I1
        A = csc_matrix(A).copy()
        # The call to copy() is necessary to resize the data arrays of the sparse matrix:
        # During the conversion to csc_matrix, entries corresponding with the same
        # coordinates are summed up, resulting in shorter data arrays. The shortening
        # is implemented by calling self.prune() which creates the view self.data[:self.nnz].
        # Thus, the original data array is not deleted and all memory stays allocated.
        return A
class DiffusionOperatorQ1(NumpyMatrixBasedOperator):
    """Diffusion |Operator| for bilinear finite elements.

    The operator is of the form ::

        (Lu)(x) = c ∇ ⋅ [ d(x) ∇ u(x) ]

    The function `d` can be scalar- or matrix-valued.

    The current implementation works in two dimensions, but can be trivially
    extended to arbitrary dimensions.

    Parameters
    ----------
    grid
        The |Grid| for which to assemble the operator.
    boundary_info
        |BoundaryInfo| for the treatment of Dirichlet boundary conditions.
    diffusion_function
        The |Function| `d(x)` with ``shape_range == tuple()`` or
        ``shape_range = (grid.dim_outer, grid.dim_outer)``. If `None`, constant one is
        assumed.
    diffusion_constant
        The constant `c`. If `None`, `c` is set to one.
    dirichlet_clear_columns
        If `True`, set columns of the system matrix corresponding to Dirichlet boundary
        DOFs to zero to obtain a symmetric system matrix. Otherwise, only the rows will
        be set to zero.
    dirichlet_clear_diag
        If `True`, also set diagonal entries corresponding to Dirichlet boundary DOFs to
        zero (e.g. for affine decomposition). Otherwise they are set to one.
    name
        Name of the operator.
    """

    sparse = True

    def __init__(self, grid, boundary_info, diffusion_function=None, diffusion_constant=None,
                 dirichlet_clear_columns=False, dirichlet_clear_diag=False, name=None):
        assert grid.reference_element(0) in {square}, 'A square grid is expected!'
        assert diffusion_function is None \
            or (isinstance(diffusion_function, FunctionInterface) and
                diffusion_function.dim_domain == grid.dim_outer and
                diffusion_function.shape_range == tuple() or diffusion_function.shape_range == (grid.dim_outer,) * 2)
        self.source = self.range = NumpyVectorSpace(grid.size(grid.dim))
        self.grid = grid
        self.boundary_info = boundary_info
        self.diffusion_constant = diffusion_constant
        self.diffusion_function = diffusion_function
        self.dirichlet_clear_columns = dirichlet_clear_columns
        self.dirichlet_clear_diag = dirichlet_clear_diag
        self.name = name
        # only a parametric diffusion function contributes to the parameter type
        if diffusion_function is not None:
            self.build_parameter_type(inherits=(diffusion_function,))

    def _assemble(self, mu=None):
        g = self.grid
        bi = self.boundary_info

        # gradients of the Q1 shape functions, evaluated at the quadrature
        # points (unlike P1 they are not constant per element)
        if g.dim == 2:
            q, w = g.reference_element.quadrature(order=2)
            SF_GRAD = np.array(([q[..., 1] - 1., q[..., 0] - 1.],
                                [1. - q[..., 1], -q[..., 0]],
                                [q[..., 1], q[..., 0]],
                                [-q[..., 1], 1. - q[..., 0]]))
        else:
            raise NotImplementedError

        # fix: corrected misspelled log messages ('Calulate', 'beween')
        self.logger.info('Calculate gradients of shape functions transformed by reference map ...')
        SF_GRADS = np.einsum('eij,pjc->epic', g.jacobian_inverse_transposed(0), SF_GRAD)

        self.logger.info('Calculate all local scalar products between gradients ...')
        if self.diffusion_function is not None and self.diffusion_function.shape_range == tuple():
            # scalar diffusion: one factor per element
            D = self.diffusion_function(self.grid.centers(0), mu=mu)
            SF_INTS = np.einsum('epic,eqic,c,e,e->epq', SF_GRADS, SF_GRADS, w, g.integration_elements(0), D).ravel()
            del D
        elif self.diffusion_function is not None:
            # matrix-valued diffusion: contract the gradients with the tensor
            D = self.diffusion_function(self.grid.centers(0), mu=mu)
            SF_INTS = np.einsum('epic,eqjc,c,e,eij->epq', SF_GRADS, SF_GRADS, w, g.integration_elements(0), D).ravel()
            del D
        else:
            SF_INTS = np.einsum('epic,eqic,c,e->epq', SF_GRADS, SF_GRADS, w, g.integration_elements(0)).ravel()
        del SF_GRADS

        if self.diffusion_constant is not None:
            SF_INTS *= self.diffusion_constant

        self.logger.info('Determine global dofs ...')
        # global (row, column) index for every local matrix entry; 4 = number
        # of shape functions per square element
        SF_I0 = np.repeat(g.subentities(0, g.dim), 4, axis=1).ravel()
        SF_I1 = np.tile(g.subentities(0, g.dim), [1, 4]).ravel()

        self.logger.info('Boundary treatment ...')
        if bi.has_dirichlet:
            SF_INTS = np.where(bi.dirichlet_mask(g.dim)[SF_I0], 0, SF_INTS)
            if self.dirichlet_clear_columns:
                SF_INTS = np.where(bi.dirichlet_mask(g.dim)[SF_I1], 0, SF_INTS)
            # re-insert unit diagonal entries for the cleared Dirichlet DOFs
            if not self.dirichlet_clear_diag:
                SF_INTS = np.hstack((SF_INTS, np.ones(bi.dirichlet_boundaries(g.dim).size)))
                SF_I0 = np.hstack((SF_I0, bi.dirichlet_boundaries(g.dim)))
                SF_I1 = np.hstack((SF_I1, bi.dirichlet_boundaries(g.dim)))

        self.logger.info('Assemble system matrix ...')
        A = coo_matrix((SF_INTS, (SF_I0, SF_I1)), shape=(g.size(g.dim), g.size(g.dim)))
        del SF_INTS, SF_I0, SF_I1
        A = csc_matrix(A).copy()
        # The call to copy() is necessary to resize the data arrays of the sparse matrix:
        # During the conversion to csc_matrix, entries corresponding with the same
        # coordinates are summed up, resulting in shorter data arrays. The shortening
        # is implemented by calling self.prune() which creates the view self.data[:self.nnz].
        # Thus, the original data array is not deleted and all memory stays allocated.
        return A
class RobinBoundaryOperator(NumpyMatrixBasedOperator):
    """Robin boundary |Operator| for linear finite elements.

    The operator represents the contribution of Robin boundary conditions to the
    stiffness matrix, where the boundary condition is supposed to be given in the
    form ::

        -[ d(x) ∇u(x) ] ⋅ n(x) = c(x) (u(x) - g(x))

    `d` and `n` are the diffusion function (see `DiffusionOperatorP1`) and the
    unit outer normal in `x`, while `c` is the (scalar) Robin parameter
    function and `g` is the (also scalar) Robin boundary value function.

    Parameters
    ----------
    grid
        The |Grid| over which to assemble the operator
    boundary_info
        |BoundaryInfo| for the treatment of Dirichlet boundary conditions
    robin_data
        Tuple providing two |Functions| that represent the Robin parameter and boundary
        value function. If `None`, the resulting operator is zero.
    order
        Order of the Gauss quadrature used on the boundary edges.
    name
        Name of the operator
    """

    sparse = True

    def __init__(self, grid, boundary_info, robin_data=None, order=2, name=None):
        assert robin_data is None or (isinstance(robin_data, tuple) and len(robin_data) == 2)
        # both Robin functions must be scalar and live on the grid's ambient space
        assert robin_data is None or all([isinstance(f, FunctionInterface)
                                          and f.dim_domain == grid.dim_outer
                                          and f.shape_range == tuple() for f in robin_data])
        self.source = self.range = NumpyVectorSpace(grid.size(grid.dim))
        self.grid = grid
        self.boundary_info = boundary_info
        self.robin_data = robin_data
        self.name = name
        self.order = order

    def _assemble(self, mu=None):
        g = self.grid
        bi = self.boundary_info

        if g.dim > 2:
            raise NotImplementedError

        # without Robin boundaries (or data) the contribution is the zero matrix
        if bi is None or not bi.has_robin or self.robin_data is None:
            return coo_matrix((g.size(g.dim), g.size(g.dim))).tocsc()

        RI = bi.robin_boundaries(1)
        if g.dim == 1:
            # 1D: boundary "faces" are points; each contributes c(x) on the diagonal
            robin_c = self.robin_data[0](g.centers(1)[RI], mu=mu)
            I = coo_matrix((robin_c, (RI, RI)), shape=(g.size(g.dim), g.size(g.dim)))
            return csc_matrix(I).copy()
        else:
            # 2D: integrate c * phi_i * phi_j over each Robin edge with a 1D rule
            xref = g.quadrature_points(1, order=self.order)[RI]
            robin_c = self.robin_data[0](xref, mu=mu)
            q, w = line.quadrature(order=self.order)
            SF = np.squeeze(np.array([1 - q, q]))
            SF_INTS = np.einsum('ep,pi,pj,e,p->eij', robin_c, SF, SF, g.integration_elements(1)[RI], w).ravel()
            # repeat/tile produce the (row, column) pairs matching the raveled
            # 'eij' ordering of the 2x2 local matrices above
            SF_I0 = np.repeat(g.subentities(1, g.dim)[RI], 2).ravel()
            SF_I1 = np.tile(g.subentities(1, g.dim)[RI], [1, 2]).ravel()
            I = coo_matrix((SF_INTS, (SF_I0, SF_I1)), shape=(g.size(g.dim), g.size(g.dim)))
            return csc_matrix(I).copy()
class InterpolationOperator(NumpyMatrixBasedOperator):
    """Lagrange interpolation operator for continuous finite element spaces.

    Applying the assembled operator yields the vector of values of the given
    |Function| at the grid's vertices (codim-`dim` entity centers).

    Parameters
    ----------
    grid
        The |Grid| on which to interpolate.
    function
        The |Function| to interpolate.
    """

    source = NumpyVectorSpace(1)
    linear = True

    def __init__(self, grid, function):
        # the function must be scalar-valued and defined on the ambient space
        assert function.shape_range == tuple()
        assert function.dim_domain == grid.dim_outer
        self.range = NumpyVectorSpace(grid.size(grid.dim))
        self.function = function
        self.grid = grid
        self.build_parameter_type(inherits=(function,))

    def _assemble(self, mu=None):
        # nodal interpolation: evaluate at every vertex, return one column
        vertices = self.grid.centers(self.grid.dim)
        values = self.function.evaluate(vertices, mu=mu)
        return values.reshape((-1, 1))
| 44.062992
| 118
| 0.62369
| 4,539
| 33,576
| 4.484248
| 0.078211
| 0.014346
| 0.014543
| 0.008843
| 0.892061
| 0.883414
| 0.866513
| 0.850103
| 0.840081
| 0.834873
| 0
| 0.011393
| 0.265428
| 33,576
| 761
| 119
| 44.120894
| 0.813534
| 0.319842
| 0
| 0.776382
| 0
| 0
| 0.044403
| 0.001
| 0
| 0
| 0
| 0.002628
| 0.035176
| 1
| 0.040201
| false
| 0
| 0.017588
| 0.002513
| 0.130653
| 0.002513
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6aa2f7e0406cadc8cde47adef1e3f6fc5f8c9080
| 150
|
py
|
Python
|
src/sentry_plugins/pushover/__init__.py
|
MattPark/sentry-plugins
|
3b08a43ea9ca1fb0fd183c3fe7bd5606f14ba993
|
[
"Apache-2.0"
] | null | null | null |
src/sentry_plugins/pushover/__init__.py
|
MattPark/sentry-plugins
|
3b08a43ea9ca1fb0fd183c3fe7bd5606f14ba993
|
[
"Apache-2.0"
] | 2
|
2018-05-26T13:19:41.000Z
|
2018-06-01T20:14:41.000Z
|
src/sentry_plugins/pushover/__init__.py
|
MattPark/sentry-plugins
|
3b08a43ea9ca1fb0fd183c3fe7bd5606f14ba993
|
[
"Apache-2.0"
] | 1
|
2018-05-26T11:45:46.000Z
|
2018-05-26T11:45:46.000Z
|
from __future__ import absolute_import

# Refuse to load if the standalone 'sentry-pushover' package is installed
# (presumably it would conflict with this bundled plugin — the helper raises
# in that case; confirm against assert_package_not_installed's docs).
from sentry_plugins.base import assert_package_not_installed

assert_package_not_installed('sentry-pushover')
| 25
| 60
| 0.886667
| 20
| 150
| 6.05
| 0.6
| 0.214876
| 0.264463
| 0.413223
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073333
| 150
| 5
| 61
| 30
| 0.870504
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
0aa0beadbab7d7826a09e6d0707c2df5af396b1f
| 20,858
|
py
|
Python
|
frgpascal/experimentaldesign/scheduler.py
|
fenning-research-group/PASCAL
|
c175e60ec2015ac8dce1ed3ae037619072d48cea
|
[
"MIT"
] | 1
|
2022-02-11T22:31:32.000Z
|
2022-02-11T22:31:32.000Z
|
frgpascal/experimentaldesign/scheduler.py
|
fenning-research-group/PASCAL
|
c175e60ec2015ac8dce1ed3ae037619072d48cea
|
[
"MIT"
] | 7
|
2021-05-06T16:07:51.000Z
|
2021-08-07T15:18:06.000Z
|
frgpascal/experimentaldesign/scheduler.py
|
fenning-research-group/PASCAL
|
c175e60ec2015ac8dce1ed3ae037619072d48cea
|
[
"MIT"
] | 1
|
2021-12-11T00:42:33.000Z
|
2021-12-11T00:42:33.000Z
|
from ortools.sat.python import cp_model
import matplotlib.pyplot as plt
import numpy as np
from frgpascal.workers import (
Worker_Characterization,
Worker_GantryGripper,
Worker_Hotplate,
Worker_SpincoaterLiquidHandler,
Worker_Storage,
)
from frgpascal.experimentaldesign.tasks import Task, generate_sample_worklist
# from frgpascal.experimentaldesign.tasks import workers
# Worker -> capacity map used by the schedulers below: the capacity is how
# many tasks a worker can service concurrently. Capacity 1 workers become
# exclusive resources (AddNoOverlap); capacity > 1 workers become cumulative
# resources (AddCumulative). NOTE: insertion order matters — plot_solution
# uses list(self.workers.keys()).index(w) for the y-axis row of each worker.
workers = {
    Worker_Characterization: 1,
    Worker_GantryGripper: 1,
    Worker_Hotplate: 25,
    Worker_SpincoaterLiquidHandler: 1,
    Worker_Storage: 45,
}
### Task Scheduler
class Scheduler:
    """Schedule sample tasks onto shared workers with OR-Tools CP-SAT.

    Builds a constraint model in which every task gets integer start/end
    variables, capacity-1 workers are exclusive resources (NoOverlap),
    higher-capacity workers are cumulative resources, and the overall
    makespan is minimized.

    Parameters
    ----------
    samples : list
        Sample objects; ``solve`` attaches a ``tasks`` worklist to each.
    spanning_tasks : list, optional
        Task names for which the window (preceding task start -> following
        task end) of one sample must not overlap the same window of another.
    enforce_sample_order : bool
        If True, each sample's first task must start strictly after the
        preceding sample's first task.
    prioritize_first_spincoat : bool
        If True, every sample's first spincoat must end no later than the
        earliest second spincoat ends.
    """

    def __init__(
        self,
        samples,
        spanning_tasks=None,
        enforce_sample_order=False,
        prioritize_first_spincoat=False,
    ):
        self.workers = workers  # module-level worker -> capacity map
        self.samples = samples
        if spanning_tasks is None:
            self.spanning_tasks = []
        else:
            self.spanning_tasks = spanning_tasks
        self.enforce_sample_order = enforce_sample_order
        self.prioritize_first_spincoat = prioritize_first_spincoat

    def _generate_worklists(self):
        # Build a worklist per sample and flatten all tasks into one list.
        for s in self.samples:
            s.tasks = generate_sample_worklist(s)
        self.tasks = {s.name: s.tasks for s in self.samples}
        self.tasklist = [
            t for sample_tasks in self.tasks.values() for t in sample_tasks
        ]
        # Horizon = fully serial execution time; a safe upper bound for all
        # time variables in the model.
        self.horizon = int(sum([t.duration for t in self.tasklist]))
        # Collect each sample's first and second spincoat task.
        # NOTE(review): only the second spincoat (n == 1) is recorded in
        # later_spincoats; any third or later spincoat is silently ignored —
        # confirm that is intended.
        self.first_spincoats = []
        self.later_spincoats = []
        for s in self.samples:
            tasks = s.tasks
            n = 0
            for t in tasks:
                if t.task == "spincoat":
                    if n == 0:
                        self.first_spincoats.append(t)
                    elif n == 1:
                        self.later_spincoats.append(t)
                    n += 1

    def _initialize_model(self):
        """Construct the CP-SAT model: variables, constraints, objective."""
        self._generate_worklists()
        self.model = cp_model.CpModel()
        ending_variables = []
        machine_intervals = {w: [] for w in self.workers}
        # reservoirs = {}
        ### Task Constraints
        for task in self.tasklist:
            # End can be no earlier than the task's own duration.
            task.end_var = self.model.NewIntVar(
                task.duration, self.horizon, "end " + str(task.taskid)
            )
            ending_variables.append(task.end_var)
        for task in self.tasklist:
            ## connect to preceding tasks
            if task.immediate:
                # Immediate tasks start exactly when their precedent ends
                # (shared variable, no slack possible).
                task.start_var = task.precedent.end_var
            else:
                task.start_var = self.model.NewIntVar(
                    0, self.horizon, "start " + str(task.taskid)
                )
                if task.precedent is not None:
                    self.model.Add(task.start_var >= task.precedent.end_var)
            ## mark workers as occupied during this task
            interval_var = self.model.NewIntervalVar(
                task.start_var,
                task.duration,
                task.end_var,
                "interval " + str(task.taskid),
            )
            for w in task.workers:
                machine_intervals[w].append(interval_var)
        ### Force sequential tasks to preserve order even if not immediate #TODO this is not generalizable!
        spanning_tasks = {c: [] for c in self.spanning_tasks}
        for sample, tasks in self.tasks.items():
            # Slide a 3-task window over each worklist; when the middle task
            # is a "spanning" task, cover previous-start -> next-end with an
            # interval so different samples cannot interleave inside it.
            for t0, t1, t2 in zip(tasks, tasks[1:], tasks[2:]):
                if t1.task in spanning_tasks:
                    duration = self.model.NewIntVar(0, self.horizon, "duration")
                    interval = self.model.NewIntervalVar(
                        t0.start_var, duration, t2.end_var, "sampleinterval"
                    )
                    spanning_tasks[t1.task].append(interval)
        for intervals in spanning_tasks.values():
            self.model.AddNoOverlap(intervals)
        ### Force sample order if flagged
        if self.enforce_sample_order:
            for preceding_sample, sample in zip(self.samples, self.samples[1:]):
                self.model.Add(
                    sample.tasks[0].start_var > preceding_sample.tasks[0].start_var
                )
        if self.prioritize_first_spincoat:
            # first_sc_duration = self.model.NewIntVar(
            #     0, self.horizon, "first_sc_duration"
            # )
            # first_sc_interval = self.model.NewIntervalVar(
            #     self.first_spincoats[0].start_var,
            #     first_sc_duration,
            #     self.first_spincoats[-1].end_var,
            #     "first_sc_interval",
            # )
            # later_sc_duration = self.model.NewIntVar(
            #     0, self.horizon, "later_sc_duration"
            # )
            # later_sc_interval = self.model.NewIntervalVar(
            #     self.later_spincoats[0].start_var,
            #     later_sc_duration,
            #     self.later_spincoats[-1].end_var,
            #     "later_sc_interval",
            # )
            # self.model.AddNoOverlap([first_sc_interval, later_sc_interval])
            # last_first_sc = self.first_spincoats[-1]
            # first_later_sc = self.later_spincoats[0]
            # self.model.Add(last_first_sc.start_var > first_later_sc.start_var)
            # Latest first-spincoat end must not exceed the earliest
            # second-spincoat end.
            first_sc = self.model.NewIntVar(0, self.horizon, "firstsc")
            self.model.AddMaxEquality(
                first_sc, [t.end_var for t in self.first_spincoats]
            )
            later_sc = self.model.NewIntVar(0, self.horizon, "latersc")
            self.model.AddMinEquality(
                later_sc, [t.end_var for t in self.later_spincoats]
            )
            self.model.Add(later_sc >= first_sc)
        ### Worker Constraints
        for w, capacity in self.workers.items():
            intervals = machine_intervals[w]
            if capacity > 1:
                # Cumulative resource: every task demands one capacity unit.
                demands = [1 for _ in machine_intervals[w]]
                self.model.AddCumulative(intervals, demands, capacity)
            else:
                # Exclusive resource: at most one task at a time.
                self.model.AddNoOverlap(intervals)
        # Objective: minimize the makespan (maximum task end time).
        objective_var = self.model.NewIntVar(0, self.horizon, "makespan")
        self.model.AddMaxEquality(objective_var, ending_variables)
        self.model.Minimize(objective_var)

    def _generate_ordered_tasklist(self):
        # Flatten the solved tasks, order them chronologically, and inject
        # explicit gantry-idle steps where needed.
        ordered_tasks = [task for sample in self.samples for task in sample.tasks]
        ordered_tasks.sort(key=lambda x: x.start)
        ordered_tasks = self._insert_idlegantry_steps(ordered_tasks)
        return ordered_tasks

    def _insert_idlegantry_steps(self, ordered_tasks):
        """Insert an 'idle_gantry' task after long post-hotplate gaps.

        Whenever a gantry/gripper task whose name ends with "to_hotplate"
        is followed by more than 10 time units of gantry idle time, an
        immediate 'idle_gantry' task is appended and also spliced into the
        owning sample's worklist.
        """
        gg_tasks = [t for t in ordered_tasks if Worker_GantryGripper in t.workers]
        for precedent, task in zip(gg_tasks, gg_tasks[1:]):
            gantry_idle_time = task.start - precedent.end
            if precedent.task.endswith("to_hotplate") and gantry_idle_time > 10:
                idle_task = Task(
                    task="idle_gantry",
                    sample=precedent.sample,
                    precedent=precedent,
                    immediate=True,
                )
                idle_task.start = precedent.start + 1
                idle_task.end = idle_task.start + idle_task.duration
                ordered_tasks.append(idle_task)
                # Splice the idle task into the sample's own worklist too.
                sample_tasklist = precedent.sample.tasks
                idx = sample_tasklist.index(precedent)
                sample_tasklist.insert(idx + 1, idle_task)
                # print(
                #     f"added idle_gantry between {precedent} and {task} (idle time of {gantry_idle_time} seconds)"
                # )
        ordered_tasks.sort(key=lambda x: x.start)
        return ordered_tasks

    def solve(self, solve_time=5):
        """Solve the model and return the chronologically ordered task list.

        Parameters
        ----------
        solve_time : float
            Maximum solver wall time in seconds.
        """
        self._initialize_model()
        self.solver = cp_model.CpSolver()
        self.solver.parameters.max_time_in_seconds = solve_time
        self.solver.parameters.num_search_workers = 0  # use all cores
        status = self.solver.Solve(self.model)
        print(f"solution status: {self.solver.StatusName()}")
        # if status in [3, 4]:
        #     return
        # Copy the solved times back onto the task objects.
        # NOTE(review): values are read even if the solve was infeasible or
        # errored (the status check above is commented out) — confirm.
        for s in self.samples:
            for task in s.tasks:
                task.start = self.solver.Value(task.start_var)
                task.end = self.solver.Value(task.end_var)
        self.plot_solution()
        return self._generate_ordered_tasklist()

    def plot_solution(self, ax=None):
        """Plot a Gantt-style chart of the schedule, one row per worker.

        NOTE(review): the ``ax`` argument is currently ignored — the method
        always opens a new figure and rebinds ``ax`` via ``plt.twiny()``.
        """
        plt.figure(figsize=(14, 5))
        for idx, (sample, tasklist) in enumerate(self.tasks.items()):
            color = plt.cm.tab20(idx % 20)
            # Per-sample vertical offset inside the worker's row.
            offset = 0.2 + 0.6 * (idx / len(self.tasks))
            for t in tasklist:
                for w in t.workers:
                    y = [list(self.workers.keys()).index(w) + offset] * 2
                    x = [t.start / 60, t.end / 60]  # raw units / 60 -> minutes
                    plt.plot(x, y, color=color)
        plt.yticks(
            range(len(self.workers)),
            labels=[str(w).split("Worker_")[1][:-2] for w in self.workers],
        )
        plt.xlabel("Time (minutes)")
        xlim0 = plt.xlim()
        # Faint separators between worker rows.
        plt.hlines(
            [i for i in range(1, len(workers))],
            *xlim0,
            colors="k",
            alpha=0.1,
            linestyles="dotted",
        )
        plt.xlim(xlim0)
        # Secondary x-axis in hours.
        ax = plt.twiny()
        ax.set_xlim([x / 60 for x in xlim0])
        ax.set_xlabel("Time (hours)")

    # def _set_start_time(self, task):
    #     for pid in task.precedents:
    #         ptidx = self.tasks.index(pid)
    #         pt = self.tasks[ptidx]
    #         if pt.end_time < task.start_time:
    #             task.start_time = pt.end_time
class Scheduler_PrioritizeFirstSpincoat:
    """Two-round CP-SAT scheduler that schedules first spincoats first.

    Round one schedules only each sample's tasks up to (and including) the
    first "rest" following its first spincoat; the resulting times are then
    frozen as constants and round two schedules the remaining tasks around
    them. See :class:`Scheduler` for the single-round variant.

    Parameters
    ----------
    samples : list
        Sample objects; ``solve`` attaches a ``tasks`` worklist to each.
    spanning_tasks : list, optional
        Task names for which the window (preceding task start -> following
        task end) of one sample must not overlap the same window of another.
    enforce_sample_order : bool
        If True, each sample's first task must start strictly after the
        preceding sample's first task (round one only).
    """

    def __init__(
        self,
        samples,
        spanning_tasks=None,
        enforce_sample_order=False,
    ):
        self.workers = workers  # module-level worker -> capacity map
        self.samples = samples
        if spanning_tasks is None:
            self.spanning_tasks = []
        else:
            self.spanning_tasks = spanning_tasks
        self.enforce_sample_order = enforce_sample_order

    def _generate_worklists(self):
        # Build a worklist per sample and flatten all tasks into one list.
        for s in self.samples:
            s.tasks = generate_sample_worklist(s)
        self.tasks = {s.name: s.tasks for s in self.samples}
        self.tasklist = [
            t for sample_tasks in self.tasks.values() for t in sample_tasks
        ]
        # Horizon = fully serial execution time; upper bound for all vars.
        self.horizon = int(sum([t.duration for t in self.tasklist]))
        # Split each worklist at the first "rest" that follows the first
        # spincoat: everything up to and including that rest goes to round
        # one (first_tasklist), the remainder to round two (second_tasklist).
        self.first_tasklist = []
        self.second_tasklist = []
        for sample_tasks in self.tasks.values():
            first = True
            done = False
            for t in sample_tasks:
                if not first and done:
                    self.second_tasklist.append(t)
                else:
                    self.first_tasklist.append(t)
                    if t.task == "spincoat":
                        first = False
                    if not first and t.task == "rest":
                        done = True
        # Same split, grouped per sample name.
        self.first_tasks = {
            s.name: [t for t in s.tasks if t in self.first_tasklist]
            for s in self.samples
        }
        self.second_tasks = {
            s.name: [t for t in s.tasks if t in self.second_tasklist]
            for s in self.samples
        }

    def _initialize_model_first(self):
        """Build the round-one model over first_tasklist only."""
        self._generate_worklists()
        self.model = cp_model.CpModel()
        ending_variables = []
        machine_intervals = {w: [] for w in self.workers}
        # reservoirs = {}
        ### Task Constraints
        for task in self.first_tasklist:
            # End can be no earlier than the task's own duration.
            task.end_var = self.model.NewIntVar(
                task.duration, self.horizon, "end " + str(task.taskid)
            )
            ending_variables.append(task.end_var)
        for task in self.first_tasklist:
            ## connect to preceding tasks
            if task.immediate:
                # Immediate tasks start exactly when their precedent ends.
                task.start_var = task.precedent.end_var
            else:
                task.start_var = self.model.NewIntVar(
                    0, self.horizon, "start " + str(task.taskid)
                )
                if task.precedent is not None:
                    self.model.Add(task.start_var >= task.precedent.end_var)
            ## mark workers as occupied during this task
            interval_var = self.model.NewIntervalVar(
                task.start_var,
                task.duration,
                task.end_var,
                "interval " + str(task.taskid),
            )
            for w in task.workers:
                machine_intervals[w].append(interval_var)
        ### Force sequential tasks to preserve order even if not immediate #TODO this is not generalizable!
        spanning_tasks = {c: [] for c in self.spanning_tasks}
        for sample, tasks in self.first_tasks.items():
            # 3-task sliding window; see Scheduler._initialize_model.
            for t0, t1, t2 in zip(tasks, tasks[1:], tasks[2:]):
                if t1.task in spanning_tasks:
                    duration = self.model.NewIntVar(0, self.horizon, "duration")
                    interval = self.model.NewIntervalVar(
                        t0.start_var, duration, t2.end_var, "sampleinterval"
                    )
                    spanning_tasks[t1.task].append(interval)
        for intervals in spanning_tasks.values():
            self.model.AddNoOverlap(intervals)
        ### Force sample order if flagged
        if self.enforce_sample_order:
            for preceding_sample, sample in zip(self.samples, self.samples[1:]):
                self.model.Add(
                    sample.tasks[0].start_var > preceding_sample.tasks[0].start_var
                )
        ### Worker Constraints
        for w, capacity in self.workers.items():
            intervals = machine_intervals[w]
            if capacity > 1:
                # Cumulative resource: every task demands one capacity unit.
                demands = [1 for _ in machine_intervals[w]]
                self.model.AddCumulative(intervals, demands, capacity)
            else:
                # Exclusive resource: at most one task at a time.
                self.model.AddNoOverlap(intervals)
        # Objective: minimize the round-one makespan.
        objective_var = self.model.NewIntVar(0, self.horizon, "makespan")
        self.model.AddMaxEquality(objective_var, ending_variables)
        self.model.Minimize(objective_var)

    def _initialize_model_second(self):
        """Build the round-two model over ALL tasks.

        Tasks already timed in round one (start/end are not NaN) are pinned
        as model constants; the remaining tasks get free variables.
        """
        ending_variables = []
        self.model = cp_model.CpModel()
        machine_intervals = {w: [] for w in self.workers}
        # reservoirs = {}
        ### Task Constraints
        for task in self.tasklist:
            if not np.isnan(task.end):
                # Already solved in round one: freeze the end time.
                task.end_var = self.model.NewConstant(task.end)
            else:
                task.end_var = self.model.NewIntVar(
                    task.duration, self.horizon, "end " + str(task.taskid)
                )
            ending_variables.append(task.end_var)
        for task in self.tasklist:
            ## connect to preceding tasks
            if task.immediate:
                task.start_var = task.precedent.end_var
            else:
                if not np.isnan(task.start):
                    # Already solved in round one: freeze the start time.
                    task.start_var = self.model.NewConstant(task.start)
                else:
                    task.start_var = self.model.NewIntVar(
                        0, self.horizon, "start " + str(task.taskid)
                    )
                if task.precedent is not None:
                    self.model.Add(task.start_var >= task.precedent.end_var)
            ## mark workers as occupied during this task
            interval_var = self.model.NewIntervalVar(
                task.start_var,
                task.duration,
                task.end_var,
                "interval " + str(task.taskid),
            )
            for w in task.workers:
                machine_intervals[w].append(interval_var)
        ### Force sequential tasks to preserve order even if not immediate #TODO this is not generalizable!
        spanning_tasks = {c: [] for c in self.spanning_tasks}
        for sample, tasks in self.tasks.items():
            # 3-task sliding window; see Scheduler._initialize_model.
            for t0, t1, t2 in zip(tasks, tasks[1:], tasks[2:]):
                if t1.task in spanning_tasks:
                    duration = self.model.NewIntVar(0, self.horizon, "duration")
                    interval = self.model.NewIntervalVar(
                        t0.start_var, duration, t2.end_var, "sampleinterval"
                    )
                    spanning_tasks[t1.task].append(interval)
        for intervals in spanning_tasks.values():
            self.model.AddNoOverlap(intervals)
        # ### Force sample order if flagged
        # if self.enforce_sample_order:
        #     for preceding_sample, sample in zip(self.samples, self.samples[1:]):
        #         self.model.Add(
        #             sample.tasks[0].start_var > preceding_sample.tasks[0].start_var
        #         )
        ### Worker Constraints
        for w, capacity in self.workers.items():
            intervals = machine_intervals[w]
            if capacity > 1:
                # Cumulative resource: every task demands one capacity unit.
                demands = [1 for _ in machine_intervals[w]]
                self.model.AddCumulative(intervals, demands, capacity)
            else:
                # Exclusive resource: at most one task at a time.
                self.model.AddNoOverlap(intervals)
        # Objective: minimize the overall makespan.
        objective_var = self.model.NewIntVar(0, self.horizon, "makespan")
        self.model.AddMaxEquality(objective_var, ending_variables)
        self.model.Minimize(objective_var)

    def _generate_ordered_tasklist(self):
        # Flatten the solved tasks, order them chronologically, and inject
        # explicit gantry-idle steps where needed.
        ordered_tasks = [task for sample in self.samples for task in sample.tasks]
        ordered_tasks.sort(key=lambda x: x.start)
        ordered_tasks = self._insert_idlegantry_steps(ordered_tasks)
        return ordered_tasks

    def _insert_idlegantry_steps(self, ordered_tasks):
        """Insert an 'idle_gantry' task after long post-hotplate gaps.

        Same logic as Scheduler._insert_idlegantry_steps: a gantry/gripper
        task ending with "to_hotplate" followed by >10 units of idle time
        gets an immediate 'idle_gantry' successor.
        """
        gg_tasks = [t for t in ordered_tasks if Worker_GantryGripper in t.workers]
        for precedent, task in zip(gg_tasks, gg_tasks[1:]):
            gantry_idle_time = task.start - precedent.end
            if precedent.task.endswith("to_hotplate") and gantry_idle_time > 10:
                idle_task = Task(
                    task="idle_gantry",
                    sample=precedent.sample,
                    precedent=precedent,
                    immediate=True,
                )
                idle_task.start = precedent.start + 1
                idle_task.end = idle_task.start + idle_task.duration
                ordered_tasks.append(idle_task)
                # Splice the idle task into the sample's own worklist too.
                sample_tasklist = precedent.sample.tasks
                idx = sample_tasklist.index(precedent)
                sample_tasklist.insert(idx + 1, idle_task)
                # print(
                #     f"added idle_gantry between {precedent} and {task} (idle time of {gantry_idle_time} seconds)"
                # )
        ordered_tasks.sort(key=lambda x: x.start)
        return ordered_tasks

    def solve(self, solve_time=5):
        """Run both solve rounds and return the ordered task list.

        Parameters
        ----------
        solve_time : float
            Maximum solver wall time in seconds, applied to EACH round.
        """
        self._initialize_model_first()
        self.solver = cp_model.CpSolver()
        self.solver.parameters.max_time_in_seconds = solve_time
        self.solver.parameters.num_search_workers = 0  # use all cores
        status = self.solver.Solve(self.model)
        print(f"first round status: {self.solver.StatusName()}")
        # Pin round-one results on the task objects so round two can freeze
        # them as constants (non-NaN start/end).
        for task in self.first_tasklist:
            task.start = self.solver.Value(task.start_var)
            task.end = self.solver.Value(task.end_var)
        self._initialize_model_second()
        # NOTE(review): the same CpSolver instance is reused for round two;
        # its parameters are reset explicitly below.
        self.solver.parameters.max_time_in_seconds = solve_time
        self.solver.parameters.num_search_workers = 0  # use all cores
        status = self.solver.Solve(self.model)
        print(f"second round status: {self.solver.StatusName()}")
        for s in self.samples:
            for task in s.tasks:
                task.start = self.solver.Value(task.start_var)
                task.end = self.solver.Value(task.end_var)
        self.plot_solution()
        return self._generate_ordered_tasklist()

    def plot_solution(self, ax=None):
        """Plot a Gantt-style chart of the schedule, one row per worker.

        NOTE(review): the ``ax`` argument is currently ignored — the method
        always opens a new figure and rebinds ``ax`` via ``plt.twiny()``.
        """
        plt.figure(figsize=(14, 5))
        for idx, (sample, tasklist) in enumerate(self.tasks.items()):
            color = plt.cm.tab20(idx % 20)
            # Per-sample vertical offset inside the worker's row.
            offset = 0.2 + 0.6 * (idx / len(self.tasks))
            for t in tasklist:
                for w in t.workers:
                    y = [list(self.workers.keys()).index(w) + offset] * 2
                    x = [t.start / 60, t.end / 60]  # raw units / 60 -> minutes
                    plt.plot(x, y, color=color)
        plt.yticks(
            range(len(self.workers)),
            labels=[str(w).split("Worker_")[1][:-2] for w in self.workers],
        )
        plt.xlabel("Time (minutes)")
        xlim0 = plt.xlim()
        # Faint separators between worker rows.
        plt.hlines(
            [i for i in range(1, len(workers))],
            *xlim0,
            colors="k",
            alpha=0.1,
            linestyles="dotted",
        )
        plt.xlim(xlim0)
        # Secondary x-axis in hours.
        ax = plt.twiny()
        ax.set_xlim([x / 60 for x in xlim0])
        ax.set_xlabel("Time (hours)")

    # def _set_start_time(self, task):
    #     for pid in task.precedents:
    #         ptidx = self.tasks.index(pid)
    #         pt = self.tasks[ptidx]
    #         if pt.end_time < task.start_time:
    #             task.start_time = pt.end_time
| 39.805344
| 115
| 0.563908
| 2,397
| 20,858
| 4.736754
| 0.087192
| 0.045975
| 0.025366
| 0.021754
| 0.871587
| 0.843932
| 0.826757
| 0.81566
| 0.805267
| 0.805267
| 0
| 0.01042
| 0.342027
| 20,858
| 523
| 116
| 39.881453
| 0.81689
| 0.118707
| 0
| 0.761787
| 0
| 0
| 0.024135
| 0.004269
| 0
| 0
| 0
| 0.001912
| 0
| 1
| 0.037221
| false
| 0
| 0.012407
| 0
| 0.069479
| 0.007444
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ab53fd7c3bc60d49ba2f8d73ecef54aac9daf92
| 246,097
|
py
|
Python
|
pybind/slxos/v16r_1_00b/brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import lsp_config_admin_groups
import lsp_cspf_path_hops
import lsp_cspf_exclude_hops
import lsp_rsvp_session_rro_hops
class lsp_instances(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls - based on the path /brocade_mpls_rpc/show-mpls-bypass-lsp-name-debug/output/bypass-lsp/show-mpls-lsp-extensive-info/show-mpls-lsp-instances-info/lsp-instances. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__lsp_instance_number','__lsp_current_instance','__lsp_new_instance','__lsp_old_instance','__lsp_is_primary','__lsp_is_current_secondary','__lsp_is_selected_secondary','__lsp_config_path_configured','__lsp_config_path','__lsp_config_reoptimize_timer_configured','__lsp_config_reoptimize_timer','__lsp_config_tspec_mtu_configured','__lsp_config_tspec_mtu','__lsp_config_cos_configured','__lsp_config_cos','__lsp_config_mtu_configured','__lsp_config_mtu','__lsp_config_tie_breaking_configured','__lsp_config_tie_break_random','__lsp_config_tie_break_least_fill','__lsp_config_tie_break_most_fill','__lsp_config_cspf_disabled','__lsp_config_hot_standby','__lsp_config_pinned','__lsp_config_persistenct','__lsp_config_frr_global_revertive','__lsp_config_frr_hold_time','__lsp_config_soft_prempt','__lsp_config_exclude_interface_change','__lsp_config_prority_configured','__lsp_config_setup_prority','__lsp_config_holding_prority','__lsp_config_hop_limit_configured','__lsp_config_hop_limit','__lsp_config_traffic_eng_rate_configured','__lsp_config_traffic_eng_mean_rate','__lsp_config_traffic_eng_max_rate','__lsp_config_traffic_eng_max_burst','__lsp_config_admin_group_configured','__lsp_config_admin_groups','__lsp_path_computed_by_cspf','__lsp_path_computed_by_interface_constraint','__lsp_cspf_computation_mode_default','__lsp_cspf_computation_mode_use_bypass_metric','__lsp_cspf_computation_mode_use_bypass_liberal','__lsp_cspf_group_computation_mode_default','__lsp_cspf_group_computation_mode_add_penalty','__lsp_cspf_group_computation_mode_exclude_groups','__lsp_cspf_group_computation_mode_high_cost','__lsp_cspf_path_cost','__lsp_cspf_path_area','__lsp_cspf_computation_error','__lsp_cspf_path_hops','__lsp_cspf_exclude_hops_present','__lsp_cspf_exclude_hops','__lsp_rsvp_session_present','__lsp_rsvp_session_state_up','__lsp_rsvp_session_state','__lsp_rsvp_session_path_error_code','__lsp_rsvp_se
ssion_path_error_value','__lsp_rsvp_session_path_error_node_address','__lsp_rsvp_session_rro_hops_present','__lsp_rsvp_session_rro_hops','__lsp_maximum_bandwidth','__lsp_unreserved_priority_0_bandwidth','__lsp_unreserved_priority_1_bandwidth','__lsp_unreserved_priority_2_bandwidth','__lsp_unreserved_priority_3_bandwidth','__lsp_unreserved_priority_4_bandwidth','__lsp_unreserved_priority_5_bandwidth','__lsp_unreserved_priority_6_bandwidth','__lsp_unreserved_priority_7_bandwidth',)
_yang_name = 'lsp-instances'
_rest_name = 'lsp-instances'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
    """Initialise the generated 'lsp-instances' YANG container.

    Resolves the XPath helper and extension-method table (from keyword
    arguments or the parent node), instantiates a YANGDynClass wrapper for
    every child leaf/container defined by the brocade-mpls YANG model, and
    optionally copy-constructs from a single positional argument that
    carries the same element set.

    Keyword arguments consumed here: ``path_helper``, ``extmethods``,
    ``load``.  Raises TypeError for more than one positional argument and
    ValueError when the supplied object lacks the expected attributes.
    """
    # Resolve the XPath helper: explicit False disables it, an explicit
    # YANGPathHelper instance wins, otherwise inherit from the parent node
    # (defaulting to False when there is no parent).
    path_helper_ = kwargs.pop("path_helper", None)
    if path_helper_ is False:
        self._path_helper = False
    elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
        self._path_helper = path_helper_
    elif hasattr(self, "_parent"):
        path_helper_ = getattr(self._parent, "_path_helper", False)
        self._path_helper = path_helper_
    else:
        self._path_helper = False
    # Resolve extension methods the same way: explicit False, explicit
    # dict, inherited from parent, or False.
    extmethods = kwargs.pop("extmethods", None)
    if extmethods is False:
        self._extmethods = False
    elif extmethods is not None and isinstance(extmethods, dict):
        self._extmethods = extmethods
    elif hasattr(self, "_parent"):
        extmethods = getattr(self._parent, "_extmethods", None)
        self._extmethods = extmethods
    else:
        self._extmethods = False
    # Instantiate one YANGDynClass wrapper per YANG child node.  All type
    # restrictions (uint8/uint32 ranges, IPv4 dotted-quad patterns) and
    # naming metadata come from the brocade-mpls YANG model; the order of
    # these assignments is not significant.
    self.__lsp_cspf_path_cost = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-cspf-path-cost", rest_name="lsp-cspf-path-cost", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_old_instance = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-old-instance", rest_name="lsp-old-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_cspf_path_area = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="lsp-cspf-path-area", rest_name="lsp-cspf-path-area", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
    self.__lsp_config_tie_breaking_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-breaking-configured", rest_name="lsp-config-tie-breaking-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_tie_break_most_fill = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-break-most-fill", rest_name="lsp-config-tie-break-most-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_traffic_eng_max_burst = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-traffic-eng-max-burst", rest_name="lsp-config-traffic-eng-max-burst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_config_pinned = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-pinned", rest_name="lsp-config-pinned", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_is_current_secondary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-is-current-secondary", rest_name="lsp-is-current-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_path = YANGDynClass(base=unicode, is_leaf=True, yang_name="lsp-config-path", rest_name="lsp-config-path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
    self.__lsp_rsvp_session_state_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-rsvp-session-state-up", rest_name="lsp-rsvp-session-state-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_admin_group_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-admin-group-configured", rest_name="lsp-config-admin-group-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_current_instance = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-current-instance", rest_name="lsp-current-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_admin_groups = YANGDynClass(base=lsp_config_admin_groups.lsp_config_admin_groups, is_container='container', presence=False, yang_name="lsp-config-admin-groups", rest_name="lsp-config-admin-groups", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
    self.__lsp_config_tspec_mtu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-tspec-mtu", rest_name="lsp-config-tspec-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_rsvp_session_present = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-rsvp-session-present", rest_name="lsp-rsvp-session-present", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_setup_prority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-setup-prority", rest_name="lsp-config-setup-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
    self.__lsp_cspf_computation_mode_default = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-computation-mode-default", rest_name="lsp-cspf-computation-mode-default", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_cspf_group_computation_mode_add_penalty = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-add-penalty", rest_name="lsp-cspf-group-computation-mode-add-penalty", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_cspf_computation_error = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-cspf-computation-error", rest_name="lsp-cspf-computation-error", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_path_computed_by_interface_constraint = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-path-computed-by-interface-constraint", rest_name="lsp-path-computed-by-interface-constraint", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_rsvp_session_path_error_code = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-rsvp-session-path-error-code", rest_name="lsp-rsvp-session-path-error-code", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_config_traffic_eng_rate_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-traffic-eng-rate-configured", rest_name="lsp-config-traffic-eng-rate-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_traffic_eng_max_rate = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-traffic-eng-max-rate", rest_name="lsp-config-traffic-eng-max-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_is_selected_secondary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-is-selected-secondary", rest_name="lsp-is-selected-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_new_instance = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-new-instance", rest_name="lsp-new-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_mtu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-mtu", rest_name="lsp-config-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_rsvp_session_rro_hops_present = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-rsvp-session-rro-hops-present", rest_name="lsp-rsvp-session-rro-hops-present", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_path_computed_by_cspf = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-path-computed-by-cspf", rest_name="lsp-path-computed-by-cspf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_rsvp_session_rro_hops = YANGDynClass(base=lsp_rsvp_session_rro_hops.lsp_rsvp_session_rro_hops, is_container='container', presence=False, yang_name="lsp-rsvp-session-rro-hops", rest_name="lsp-rsvp-session-rro-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
    self.__lsp_config_hop_limit = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-hop-limit", rest_name="lsp-config-hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
    self.__lsp_config_soft_prempt = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-soft-prempt", rest_name="lsp-config-soft-prempt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_unreserved_priority_2_bandwidth = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-2-bandwidth", rest_name="lsp-unreserved-priority-2-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_config_cos_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-cos-configured", rest_name="lsp-config-cos-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_maximum_bandwidth = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-maximum-bandwidth", rest_name="lsp-maximum-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_unreserved_priority_4_bandwidth = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-4-bandwidth", rest_name="lsp-unreserved-priority-4-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_cspf_exclude_hops = YANGDynClass(base=lsp_cspf_exclude_hops.lsp_cspf_exclude_hops, is_container='container', presence=False, yang_name="lsp-cspf-exclude-hops", rest_name="lsp-cspf-exclude-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
    self.__lsp_config_holding_prority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-holding-prority", rest_name="lsp-config-holding-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
    self.__lsp_config_cspf_disabled = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-cspf-disabled", rest_name="lsp-config-cspf-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_rsvp_session_state = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-rsvp-session-state", rest_name="lsp-rsvp-session-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
    self.__lsp_config_frr_global_revertive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-frr-global-revertive", rest_name="lsp-config-frr-global-revertive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    # lsp-instance-number is the list key (is_keyval=True); see
    # _set_lsp_instance_number for the guard against re-keying.
    self.__lsp_instance_number = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-instance-number", rest_name="lsp-instance-number", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_is_primary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-is-primary", rest_name="lsp-is-primary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_tspec_mtu_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tspec-mtu-configured", rest_name="lsp-config-tspec-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_path_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-path-configured", rest_name="lsp-config-path-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_tie_break_random = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-break-random", rest_name="lsp-config-tie-break-random", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_rsvp_session_path_error_value = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-rsvp-session-path-error-value", rest_name="lsp-rsvp-session-path-error-value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_config_hop_limit_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-hop-limit-configured", rest_name="lsp-config-hop-limit-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_unreserved_priority_3_bandwidth = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-3-bandwidth", rest_name="lsp-unreserved-priority-3-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_config_mtu_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-mtu-configured", rest_name="lsp-config-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_cspf_path_hops = YANGDynClass(base=lsp_cspf_path_hops.lsp_cspf_path_hops, is_container='container', presence=False, yang_name="lsp-cspf-path-hops", rest_name="lsp-cspf-path-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
    self.__lsp_unreserved_priority_1_bandwidth = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-1-bandwidth", rest_name="lsp-unreserved-priority-1-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_config_cos = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-cos", rest_name="lsp-config-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
    self.__lsp_config_reoptimize_timer = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-reoptimize-timer", rest_name="lsp-config-reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_cspf_exclude_hops_present = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-exclude-hops-present", rest_name="lsp-cspf-exclude-hops-present", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_rsvp_session_path_error_node_address = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="lsp-rsvp-session-path-error-node-address", rest_name="lsp-rsvp-session-path-error-node-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
    self.__lsp_unreserved_priority_5_bandwidth = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-5-bandwidth", rest_name="lsp-unreserved-priority-5-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_cspf_group_computation_mode_exclude_groups = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-exclude-groups", rest_name="lsp-cspf-group-computation-mode-exclude-groups", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_unreserved_priority_7_bandwidth = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-7-bandwidth", rest_name="lsp-unreserved-priority-7-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_unreserved_priority_0_bandwidth = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-0-bandwidth", rest_name="lsp-unreserved-priority-0-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_config_frr_hold_time = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-frr-hold-time", rest_name="lsp-config-frr-hold-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_tie_break_least_fill = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-break-least-fill", rest_name="lsp-config-tie-break-least-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_cspf_group_computation_mode_default = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-default", rest_name="lsp-cspf-group-computation-mode-default", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_cspf_computation_mode_use_bypass_metric = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-computation-mode-use-bypass-metric", rest_name="lsp-cspf-computation-mode-use-bypass-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_persistenct = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-persistenct", rest_name="lsp-config-persistenct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_prority_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-prority-configured", rest_name="lsp-config-prority-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_exclude_interface_change = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-exclude-interface-change", rest_name="lsp-config-exclude-interface-change", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_traffic_eng_mean_rate = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-traffic-eng-mean-rate", rest_name="lsp-config-traffic-eng-mean-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_unreserved_priority_6_bandwidth = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-6-bandwidth", rest_name="lsp-unreserved-priority-6-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__lsp_config_reoptimize_timer_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-reoptimize-timer-configured", rest_name="lsp-config-reoptimize-timer-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_cspf_computation_mode_use_bypass_liberal = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-computation-mode-use-bypass-liberal", rest_name="lsp-cspf-computation-mode-use-bypass-liberal", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_cspf_group_computation_mode_high_cost = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-high-cost", rest_name="lsp-cspf-group-computation-mode-high-cost", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_config_hot_standby = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-hot-standby", rest_name="lsp-config-hot-standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    # Optional copy-construction: a single positional argument must expose
    # every element in _pyangbind_elements; only elements reporting
    # _changed() are copied across, via the generated _set_<name> methods.
    load = kwargs.pop("load", None)
    if args:
        if len(args) > 1:
            raise TypeError("cannot create a YANG container with >1 argument")
        all_attr = True
        for e in self._pyangbind_elements:
            if not hasattr(args[0], e):
                all_attr = False
                break
        if not all_attr:
            raise ValueError("Supplied object did not have the correct attributes")
        for e in self._pyangbind_elements:
            nobj = getattr(args[0], e)
            # Skip elements still at their default value.
            if nobj._changed() is False:
                continue
            setmethod = getattr(self, "_set_%s" % e)
            # Propagate the 'load' flag so key leaves can be set during a
            # deserialisation load (see _set_lsp_instance_number).
            if load is None:
                setmethod(getattr(args[0], e))
            else:
                setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'brocade_mpls_rpc', u'show-mpls-bypass-lsp-name-debug', u'output', u'bypass-lsp', u'show-mpls-lsp-extensive-info', u'show-mpls-lsp-instances-info', u'lsp-instances']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'show-mpls-bypass-lsp-name-debug', u'output', u'bypass-lsp', u'lsp-instances']
def _get_lsp_instance_number(self):
    """
    Getter method for lsp_instance_number, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_instance_number (uint32)

    YANG Description: LSP instance number

    Returns the YANGDynClass wrapper created in __init__ (or by the
    setter), not a bare integer.
    """
    return self.__lsp_instance_number
def _set_lsp_instance_number(self, v, load=False):
"""
Setter method for lsp_instance_number, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_instance_number (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_instance_number is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_instance_number() directly.
YANG Description: LSP instance number
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-instance-number", rest_name="lsp-instance-number", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_instance_number must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-instance-number", rest_name="lsp-instance-number", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__lsp_instance_number = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_instance_number(self):
self.__lsp_instance_number = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-instance-number", rest_name="lsp-instance-number", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
# --- lsp-current-instance (boolean) accessors ---
def _get_lsp_current_instance(self):
    """
    Getter method for lsp_current_instance, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_current_instance (boolean)

    YANG Description: LSP instance type
    """
    return self.__lsp_current_instance
def _set_lsp_current_instance(self, v, load=False):
    """
    Setter method for lsp_current_instance, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_current_instance (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_current_instance is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_current_instance() directly.

    YANG Description: LSP instance type
    """
    # Union-typed wrappers coerce themselves to their underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-current-instance", rest_name="lsp-current-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_current_instance must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-current-instance", rest_name="lsp-current-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_current_instance = t
    # Notify the enclosing container's change hook, when one exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_current_instance(self):
    # Reset the leaf to a fresh, unset instance of its generated type.
    self.__lsp_current_instance = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-current-instance", rest_name="lsp-current-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
# --- lsp-new-instance (boolean) accessors ---
def _get_lsp_new_instance(self):
    """
    Getter method for lsp_new_instance, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_new_instance (boolean)

    YANG Description: LSP instance type
    """
    return self.__lsp_new_instance
def _set_lsp_new_instance(self, v, load=False):
    """
    Setter method for lsp_new_instance, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_new_instance (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_new_instance is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_new_instance() directly.

    YANG Description: LSP instance type
    """
    # Union-typed wrappers coerce themselves to their underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-new-instance", rest_name="lsp-new-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_new_instance must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-new-instance", rest_name="lsp-new-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_new_instance = t
    # Notify the enclosing container's change hook, when one exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_new_instance(self):
    # Reset the leaf to a fresh, unset instance of its generated type.
    self.__lsp_new_instance = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-new-instance", rest_name="lsp-new-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
# --- lsp-old-instance (boolean) accessors ---
def _get_lsp_old_instance(self):
    """
    Getter method for lsp_old_instance, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_old_instance (boolean)

    YANG Description: LSP instance type
    """
    return self.__lsp_old_instance
def _set_lsp_old_instance(self, v, load=False):
    """
    Setter method for lsp_old_instance, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_old_instance (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_old_instance is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_old_instance() directly.

    YANG Description: LSP instance type
    """
    # Union-typed wrappers coerce themselves to their underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-old-instance", rest_name="lsp-old-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_old_instance must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-old-instance", rest_name="lsp-old-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_old_instance = t
    # Notify the enclosing container's change hook, when one exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_old_instance(self):
    # Reset the leaf to a fresh, unset instance of its generated type.
    self.__lsp_old_instance = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-old-instance", rest_name="lsp-old-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
# --- lsp-is-primary (boolean) accessors ---
def _get_lsp_is_primary(self):
    """
    Getter method for lsp_is_primary, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_is_primary (boolean)

    YANG Description: LSP instance is primary
    """
    return self.__lsp_is_primary
def _set_lsp_is_primary(self, v, load=False):
    """
    Setter method for lsp_is_primary, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_is_primary (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_is_primary is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_is_primary() directly.

    YANG Description: LSP instance is primary
    """
    # Union-typed wrappers coerce themselves to their underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-is-primary", rest_name="lsp-is-primary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_is_primary must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-is-primary", rest_name="lsp-is-primary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_is_primary = t
    # Notify the enclosing container's change hook, when one exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_is_primary(self):
    # Reset the leaf to a fresh, unset instance of its generated type.
    self.__lsp_is_primary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-is-primary", rest_name="lsp-is-primary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
# --- lsp-is-current-secondary (boolean) accessors ---
# NOTE(review): the YANG description below reads "LSP instance is primary",
# which looks like a copy/paste from lsp-is-primary in the source YANG module;
# the leaf name indicates it reports current-secondary status. Confirm against
# the brocade-mpls YANG source.
def _get_lsp_is_current_secondary(self):
    """
    Getter method for lsp_is_current_secondary, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_is_current_secondary (boolean)

    YANG Description: LSP instance is primary
    """
    return self.__lsp_is_current_secondary
def _set_lsp_is_current_secondary(self, v, load=False):
    """
    Setter method for lsp_is_current_secondary, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_is_current_secondary (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_is_current_secondary is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_is_current_secondary() directly.

    YANG Description: LSP instance is primary
    """
    # Union-typed wrappers coerce themselves to their underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-is-current-secondary", rest_name="lsp-is-current-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_is_current_secondary must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-is-current-secondary", rest_name="lsp-is-current-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_is_current_secondary = t
    # Notify the enclosing container's change hook, when one exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_is_current_secondary(self):
    # Reset the leaf to a fresh, unset instance of its generated type.
    self.__lsp_is_current_secondary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-is-current-secondary", rest_name="lsp-is-current-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
# --- lsp-is-selected-secondary (boolean) accessors ---
def _get_lsp_is_selected_secondary(self):
    """
    Getter method for lsp_is_selected_secondary, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_is_selected_secondary (boolean)

    YANG Description: LSP instance is selected-secondary
    """
    return self.__lsp_is_selected_secondary
def _set_lsp_is_selected_secondary(self, v, load=False):
    """
    Setter method for lsp_is_selected_secondary, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_is_selected_secondary (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_is_selected_secondary is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_is_selected_secondary() directly.

    YANG Description: LSP instance is selected-secondary
    """
    # Union-typed wrappers coerce themselves to their underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-is-selected-secondary", rest_name="lsp-is-selected-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_is_selected_secondary must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-is-selected-secondary", rest_name="lsp-is-selected-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_is_selected_secondary = t
    # Notify the enclosing container's change hook, when one exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_is_selected_secondary(self):
    # Reset the leaf to a fresh, unset instance of its generated type.
    self.__lsp_is_selected_secondary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-is-selected-secondary", rest_name="lsp-is-selected-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
# --- lsp-config-path-configured (boolean) accessors ---
def _get_lsp_config_path_configured(self):
    """
    Getter method for lsp_config_path_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_path_configured (boolean)

    YANG Description: LSP path is configured
    """
    return self.__lsp_config_path_configured
def _set_lsp_config_path_configured(self, v, load=False):
    """
    Setter method for lsp_config_path_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_path_configured (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_path_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_path_configured() directly.

    YANG Description: LSP path is configured
    """
    # Union-typed wrappers coerce themselves to their underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-path-configured", rest_name="lsp-config-path-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_config_path_configured must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-path-configured", rest_name="lsp-config-path-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_config_path_configured = t
    # Notify the enclosing container's change hook, when one exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_config_path_configured(self):
    # Reset the leaf to a fresh, unset instance of its generated type.
    self.__lsp_config_path_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-path-configured", rest_name="lsp-config-path-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
# --- lsp-config-path (string) accessors --- note base=unicode: this module is
# Python 2 generated code.
def _get_lsp_config_path(self):
    """
    Getter method for lsp_config_path, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_path (string)

    YANG Description: LSP path name
    """
    return self.__lsp_config_path
def _set_lsp_config_path(self, v, load=False):
    """
    Setter method for lsp_config_path, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_path (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_path is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_path() directly.

    YANG Description: LSP path name
    """
    # Union-typed wrappers coerce themselves to their underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG string leaf.
        t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="lsp-config-path", rest_name="lsp-config-path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_config_path must be of a type compatible with string""",
            'defined-type': "string",
            'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="lsp-config-path", rest_name="lsp-config-path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
        })
    self.__lsp_config_path = t
    # Notify the enclosing container's change hook, when one exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_config_path(self):
    # Reset the leaf to a fresh, unset instance of its generated type.
    self.__lsp_config_path = YANGDynClass(base=unicode, is_leaf=True, yang_name="lsp-config-path", rest_name="lsp-config-path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
# --- lsp-config-reoptimize-timer-configured (boolean) accessors ---
def _get_lsp_config_reoptimize_timer_configured(self):
    """
    Getter method for lsp_config_reoptimize_timer_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_reoptimize_timer_configured (boolean)

    YANG Description: LSP reoptimization timer configured
    """
    return self.__lsp_config_reoptimize_timer_configured
def _set_lsp_config_reoptimize_timer_configured(self, v, load=False):
    """
    Setter method for lsp_config_reoptimize_timer_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_reoptimize_timer_configured (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_reoptimize_timer_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_reoptimize_timer_configured() directly.

    YANG Description: LSP reoptimization timer configured
    """
    # Union-typed wrappers coerce themselves to their underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-reoptimize-timer-configured", rest_name="lsp-config-reoptimize-timer-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_config_reoptimize_timer_configured must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-reoptimize-timer-configured", rest_name="lsp-config-reoptimize-timer-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_config_reoptimize_timer_configured = t
    # Notify the enclosing container's change hook, when one exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_config_reoptimize_timer_configured(self):
    # Reset the leaf to a fresh, unset instance of its generated type.
    self.__lsp_config_reoptimize_timer_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-reoptimize-timer-configured", rest_name="lsp-config-reoptimize-timer-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
# --- lsp-config-reoptimize-timer (uint32) accessors ---
def _get_lsp_config_reoptimize_timer(self):
    """
    Getter method for lsp_config_reoptimize_timer, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_reoptimize_timer (uint32)

    YANG Description: LSP reoptimization timer value
    """
    return self.__lsp_config_reoptimize_timer
def _set_lsp_config_reoptimize_timer(self, v, load=False):
    """
    Setter method for lsp_config_reoptimize_timer, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_reoptimize_timer (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_reoptimize_timer is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_reoptimize_timer() directly.

    YANG Description: LSP reoptimization timer value
    """
    # Union-typed wrappers coerce themselves to their underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a range-restricted uint32 leaf.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-reoptimize-timer", rest_name="lsp-config-reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_config_reoptimize_timer must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-reoptimize-timer", rest_name="lsp-config-reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        })
    self.__lsp_config_reoptimize_timer = t
    # Notify the enclosing container's change hook, when one exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_config_reoptimize_timer(self):
    # Reset the leaf to a fresh, unset instance of its generated type.
    self.__lsp_config_reoptimize_timer = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-reoptimize-timer", rest_name="lsp-config-reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
# --- lsp-config-tspec-mtu-configured (boolean) accessors ---
def _get_lsp_config_tspec_mtu_configured(self):
    """
    Getter method for lsp_config_tspec_mtu_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_tspec_mtu_configured (boolean)

    YANG Description: LSP traffic spec mtu configured
    """
    return self.__lsp_config_tspec_mtu_configured
def _set_lsp_config_tspec_mtu_configured(self, v, load=False):
    """
    Setter method for lsp_config_tspec_mtu_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_tspec_mtu_configured (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_tspec_mtu_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_tspec_mtu_configured() directly.

    YANG Description: LSP traffic spec mtu configured
    """
    # Union-typed wrappers coerce themselves to their underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-tspec-mtu-configured", rest_name="lsp-config-tspec-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_config_tspec_mtu_configured must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tspec-mtu-configured", rest_name="lsp-config-tspec-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_config_tspec_mtu_configured = t
    # Notify the enclosing container's change hook, when one exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_config_tspec_mtu_configured(self):
    # Reset the leaf to a fresh, unset instance of its generated type.
    self.__lsp_config_tspec_mtu_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tspec-mtu-configured", rest_name="lsp-config-tspec-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
# --- lsp-config-tspec-mtu (uint32) accessors ---
def _get_lsp_config_tspec_mtu(self):
    """
    Getter method for lsp_config_tspec_mtu, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_tspec_mtu (uint32)

    YANG Description: LSP traffic spec mtu value
    """
    return self.__lsp_config_tspec_mtu
def _set_lsp_config_tspec_mtu(self, v, load=False):
    """
    Setter method for lsp_config_tspec_mtu, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_tspec_mtu (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_tspec_mtu is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_tspec_mtu() directly.

    YANG Description: LSP traffic spec mtu value
    """
    # Union-typed wrappers coerce themselves to their underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a range-restricted uint32 leaf.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-tspec-mtu", rest_name="lsp-config-tspec-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_config_tspec_mtu must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-tspec-mtu", rest_name="lsp-config-tspec-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        })
    self.__lsp_config_tspec_mtu = t
    # Notify the enclosing container's change hook, when one exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_config_tspec_mtu(self):
    # Reset the leaf to a fresh, unset instance of its generated type.
    self.__lsp_config_tspec_mtu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-tspec-mtu", rest_name="lsp-config-tspec-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_lsp_config_cos_configured(self):
"""
Getter method for lsp_config_cos_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_cos_configured (boolean)
YANG Description: LSP cos value configured
"""
return self.__lsp_config_cos_configured
  def _set_lsp_config_cos_configured(self, v, load=False):
    """
    Setter method for lsp_config_cos_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_cos_configured (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_cos_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_cos_configured() directly.

    YANG Description: LSP cos value configured
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the leaf's base type (YANGBool).
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-cos-configured", rest_name="lsp-config-cos-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_cos_configured must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-cos-configured", rest_name="lsp-config-cos-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
      })
    self.__lsp_config_cos_configured = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_cos_configured(self):
self.__lsp_config_cos_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-cos-configured", rest_name="lsp-config-cos-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_config_cos(self):
"""
Getter method for lsp_config_cos, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_cos (uint8)
YANG Description: LSP cos value
"""
return self.__lsp_config_cos
  def _set_lsp_config_cos(self, v, load=False):
    """
    Setter method for lsp_config_cos, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_cos (uint8)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_cos is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_cos() directly.

    YANG Description: LSP cos value
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the restricted uint8 range 0..255.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-cos", rest_name="lsp-config-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_cos must be of a type compatible with uint8""",
        'defined-type': "uint8",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-cos", rest_name="lsp-config-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)""",
      })
    self.__lsp_config_cos = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_cos(self):
self.__lsp_config_cos = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-cos", rest_name="lsp-config-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
def _get_lsp_config_mtu_configured(self):
"""
Getter method for lsp_config_mtu_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_mtu_configured (boolean)
YANG Description: LSP MTU value configured
"""
return self.__lsp_config_mtu_configured
  def _set_lsp_config_mtu_configured(self, v, load=False):
    """
    Setter method for lsp_config_mtu_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_mtu_configured (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_mtu_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_mtu_configured() directly.

    YANG Description: LSP MTU value configured
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the leaf's base type (YANGBool).
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-mtu-configured", rest_name="lsp-config-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_mtu_configured must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-mtu-configured", rest_name="lsp-config-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
      })
    self.__lsp_config_mtu_configured = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_mtu_configured(self):
self.__lsp_config_mtu_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-mtu-configured", rest_name="lsp-config-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_config_mtu(self):
"""
Getter method for lsp_config_mtu, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_mtu (uint32)
YANG Description: LSP MTU value
"""
return self.__lsp_config_mtu
  def _set_lsp_config_mtu(self, v, load=False):
    """
    Setter method for lsp_config_mtu, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_mtu (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_mtu is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_mtu() directly.

    YANG Description: LSP MTU value
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the restricted uint32 range 0..4294967295.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-mtu", rest_name="lsp-config-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_mtu must be of a type compatible with uint32""",
        'defined-type': "uint32",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-mtu", rest_name="lsp-config-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
      })
    self.__lsp_config_mtu = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_mtu(self):
self.__lsp_config_mtu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-mtu", rest_name="lsp-config-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_lsp_config_tie_breaking_configured(self):
"""
Getter method for lsp_config_tie_breaking_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_tie_breaking_configured (boolean)
YANG Description: LSP CSPF tie-breaking configured
"""
return self.__lsp_config_tie_breaking_configured
  def _set_lsp_config_tie_breaking_configured(self, v, load=False):
    """
    Setter method for lsp_config_tie_breaking_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_tie_breaking_configured (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_tie_breaking_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_tie_breaking_configured() directly.

    YANG Description: LSP CSPF tie-breaking configured
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the leaf's base type (YANGBool).
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-breaking-configured", rest_name="lsp-config-tie-breaking-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_tie_breaking_configured must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-breaking-configured", rest_name="lsp-config-tie-breaking-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
      })
    self.__lsp_config_tie_breaking_configured = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_tie_breaking_configured(self):
self.__lsp_config_tie_breaking_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-breaking-configured", rest_name="lsp-config-tie-breaking-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_config_tie_break_random(self):
"""
Getter method for lsp_config_tie_break_random, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_tie_break_random (boolean)
YANG Description: LSP cspf tie braking is random
"""
return self.__lsp_config_tie_break_random
  def _set_lsp_config_tie_break_random(self, v, load=False):
    """
    Setter method for lsp_config_tie_break_random, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_tie_break_random (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_tie_break_random is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_tie_break_random() directly.

    YANG Description: LSP cspf tie breaking is random
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the leaf's base type (YANGBool).
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-break-random", rest_name="lsp-config-tie-break-random", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_tie_break_random must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-break-random", rest_name="lsp-config-tie-break-random", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
      })
    self.__lsp_config_tie_break_random = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_tie_break_random(self):
self.__lsp_config_tie_break_random = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-break-random", rest_name="lsp-config-tie-break-random", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_config_tie_break_least_fill(self):
"""
Getter method for lsp_config_tie_break_least_fill, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_tie_break_least_fill (boolean)
YANG Description: LSP cspf tie braking is least fill
"""
return self.__lsp_config_tie_break_least_fill
  def _set_lsp_config_tie_break_least_fill(self, v, load=False):
    """
    Setter method for lsp_config_tie_break_least_fill, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_tie_break_least_fill (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_tie_break_least_fill is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_tie_break_least_fill() directly.

    YANG Description: LSP cspf tie breaking is least fill
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the leaf's base type (YANGBool).
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-break-least-fill", rest_name="lsp-config-tie-break-least-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_tie_break_least_fill must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-break-least-fill", rest_name="lsp-config-tie-break-least-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
      })
    self.__lsp_config_tie_break_least_fill = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_tie_break_least_fill(self):
self.__lsp_config_tie_break_least_fill = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-break-least-fill", rest_name="lsp-config-tie-break-least-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_config_tie_break_most_fill(self):
"""
Getter method for lsp_config_tie_break_most_fill, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_tie_break_most_fill (boolean)
YANG Description: LSP cspf tie braking is most-fill
"""
return self.__lsp_config_tie_break_most_fill
  def _set_lsp_config_tie_break_most_fill(self, v, load=False):
    """
    Setter method for lsp_config_tie_break_most_fill, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_tie_break_most_fill (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_tie_break_most_fill is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_tie_break_most_fill() directly.

    YANG Description: LSP cspf tie breaking is most-fill
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the leaf's base type (YANGBool).
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-break-most-fill", rest_name="lsp-config-tie-break-most-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_tie_break_most_fill must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-break-most-fill", rest_name="lsp-config-tie-break-most-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
      })
    self.__lsp_config_tie_break_most_fill = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_tie_break_most_fill(self):
self.__lsp_config_tie_break_most_fill = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-tie-break-most-fill", rest_name="lsp-config-tie-break-most-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_config_cspf_disabled(self):
"""
Getter method for lsp_config_cspf_disabled, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_cspf_disabled (boolean)
YANG Description: LSP cspf disabled
"""
return self.__lsp_config_cspf_disabled
  def _set_lsp_config_cspf_disabled(self, v, load=False):
    """
    Setter method for lsp_config_cspf_disabled, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_cspf_disabled (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_cspf_disabled is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_cspf_disabled() directly.

    YANG Description: LSP cspf disabled
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the leaf's base type (YANGBool).
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-cspf-disabled", rest_name="lsp-config-cspf-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_cspf_disabled must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-cspf-disabled", rest_name="lsp-config-cspf-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
      })
    self.__lsp_config_cspf_disabled = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_cspf_disabled(self):
self.__lsp_config_cspf_disabled = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-cspf-disabled", rest_name="lsp-config-cspf-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_config_hot_standby(self):
"""
Getter method for lsp_config_hot_standby, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_hot_standby (boolean)
YANG Description: LSP is hot standby
"""
return self.__lsp_config_hot_standby
  def _set_lsp_config_hot_standby(self, v, load=False):
    """
    Setter method for lsp_config_hot_standby, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_hot_standby (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_hot_standby is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_hot_standby() directly.

    YANG Description: LSP is hot standby
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the leaf's base type (YANGBool).
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-hot-standby", rest_name="lsp-config-hot-standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_hot_standby must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-hot-standby", rest_name="lsp-config-hot-standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
      })
    self.__lsp_config_hot_standby = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_hot_standby(self):
self.__lsp_config_hot_standby = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-hot-standby", rest_name="lsp-config-hot-standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_config_pinned(self):
"""
Getter method for lsp_config_pinned, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_pinned (boolean)
YANG Description: LSP is pinned
"""
return self.__lsp_config_pinned
  def _set_lsp_config_pinned(self, v, load=False):
    """
    Setter method for lsp_config_pinned, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_pinned (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_pinned is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_pinned() directly.

    YANG Description: LSP is pinned
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the leaf's base type (YANGBool).
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-pinned", rest_name="lsp-config-pinned", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_pinned must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-pinned", rest_name="lsp-config-pinned", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
      })
    self.__lsp_config_pinned = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_pinned(self):
self.__lsp_config_pinned = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-pinned", rest_name="lsp-config-pinned", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_config_persistenct(self):
"""
Getter method for lsp_config_persistenct, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_persistenct (boolean)
YANG Description: LSP is persistent
"""
return self.__lsp_config_persistenct
  def _set_lsp_config_persistenct(self, v, load=False):
    """
    Setter method for lsp_config_persistenct, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_persistenct (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_persistenct is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_persistenct() directly.

    YANG Description: LSP is persistent
    (NOTE: the "persistenct" spelling comes from the YANG model; renaming
    would break the generated interface.)
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the leaf's base type (YANGBool).
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-persistenct", rest_name="lsp-config-persistenct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_persistenct must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-persistenct", rest_name="lsp-config-persistenct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
      })
    self.__lsp_config_persistenct = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_persistenct(self):
self.__lsp_config_persistenct = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-persistenct", rest_name="lsp-config-persistenct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_config_frr_global_revertive(self):
"""
Getter method for lsp_config_frr_global_revertive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_frr_global_revertive (boolean)
YANG Description: LSP global revertiveness enabled
"""
return self.__lsp_config_frr_global_revertive
  def _set_lsp_config_frr_global_revertive(self, v, load=False):
    """
    Setter method for lsp_config_frr_global_revertive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_frr_global_revertive (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_config_frr_global_revertive is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_config_frr_global_revertive() directly.

    YANG Description: LSP global revertiveness enabled
    """
    # Normalise the incoming value through its unified-type hook, when present.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap the value in the generated dynamic type; construction validates
      # it against the leaf's base type (YANGBool).
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-frr-global-revertive", rest_name="lsp-config-frr-global-revertive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      # Re-raise validation failures as a structured ValueError describing the
      # expected YANG type and the generated type expression.
      raise ValueError({
        'error-string': """lsp_config_frr_global_revertive must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-frr-global-revertive", rest_name="lsp-config-frr-global-revertive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
      })
    self.__lsp_config_frr_global_revertive = t
    # Notify the parent/registration machinery, if this object exposes it.
    if hasattr(self, '_set'):
      self._set()
def _unset_lsp_config_frr_global_revertive(self):
self.__lsp_config_frr_global_revertive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-frr-global-revertive", rest_name="lsp-config-frr-global-revertive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_config_frr_hold_time(self):
"""
Getter method for lsp_config_frr_hold_time, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_frr_hold_time (boolean)
YANG Description: LSP FRR hold time
"""
return self.__lsp_config_frr_hold_time
def _set_lsp_config_frr_hold_time(self, v, load=False):
"""
Setter method for lsp_config_frr_hold_time, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_frr_hold_time (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_frr_hold_time is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_frr_hold_time() directly.
YANG Description: LSP FRR hold time
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-frr-hold-time", rest_name="lsp-config-frr-hold-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_frr_hold_time must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-frr-hold-time", rest_name="lsp-config-frr-hold-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_config_frr_hold_time = t
if hasattr(self, '_set'):
self._set()
  def _unset_lsp_config_frr_hold_time(self):
    # Reset the leaf by replacing it with a freshly constructed default
    # YANGDynClass instance (same declaration pyangbind emits at init time).
    self.__lsp_config_frr_hold_time = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-frr-hold-time", rest_name="lsp-config-frr-hold-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  def _get_lsp_config_soft_prempt(self):
    """
    Getter method for lsp_config_soft_prempt, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_soft_prempt (boolean)
    YANG Description: LSP soft preemption enabled
    """
    # Returns the YANGDynClass-wrapped leaf value held on this instance.
    return self.__lsp_config_soft_prempt
def _set_lsp_config_soft_prempt(self, v, load=False):
"""
Setter method for lsp_config_soft_prempt, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_soft_prempt (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_soft_prempt is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_soft_prempt() directly.
YANG Description: LSP soft preemption enabled
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-soft-prempt", rest_name="lsp-config-soft-prempt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_soft_prempt must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-soft-prempt", rest_name="lsp-config-soft-prempt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_config_soft_prempt = t
if hasattr(self, '_set'):
self._set()
  def _unset_lsp_config_soft_prempt(self):
    # Reset the leaf by replacing it with a freshly constructed default
    # YANGDynClass instance (same declaration pyangbind emits at init time).
    self.__lsp_config_soft_prempt = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-soft-prempt", rest_name="lsp-config-soft-prempt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  def _get_lsp_config_exclude_interface_change(self):
    """
    Getter method for lsp_config_exclude_interface_change, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_exclude_interface_change (boolean)
    YANG Description: LSP exclude interface changed
    """
    # Returns the YANGDynClass-wrapped leaf value held on this instance.
    return self.__lsp_config_exclude_interface_change
def _set_lsp_config_exclude_interface_change(self, v, load=False):
"""
Setter method for lsp_config_exclude_interface_change, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_exclude_interface_change (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_exclude_interface_change is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_exclude_interface_change() directly.
YANG Description: LSP exclude interface changed
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-exclude-interface-change", rest_name="lsp-config-exclude-interface-change", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_exclude_interface_change must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-exclude-interface-change", rest_name="lsp-config-exclude-interface-change", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_config_exclude_interface_change = t
if hasattr(self, '_set'):
self._set()
  def _unset_lsp_config_exclude_interface_change(self):
    # Reset the leaf by replacing it with a freshly constructed default
    # YANGDynClass instance (same declaration pyangbind emits at init time).
    self.__lsp_config_exclude_interface_change = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-exclude-interface-change", rest_name="lsp-config-exclude-interface-change", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  def _get_lsp_config_prority_configured(self):
    """
    Getter method for lsp_config_prority_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_prority_configured (boolean)
    YANG Description: LSP priority configured
    """
    # Returns the YANGDynClass-wrapped leaf value held on this instance.
    # NOTE(review): "prority" is a typo inherited from the YANG model's
    # leaf name; it must be kept to match the generated identifiers.
    return self.__lsp_config_prority_configured
def _set_lsp_config_prority_configured(self, v, load=False):
"""
Setter method for lsp_config_prority_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_prority_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_prority_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_prority_configured() directly.
YANG Description: LSP priority configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-prority-configured", rest_name="lsp-config-prority-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_prority_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-prority-configured", rest_name="lsp-config-prority-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_config_prority_configured = t
if hasattr(self, '_set'):
self._set()
  def _unset_lsp_config_prority_configured(self):
    # Reset the leaf by replacing it with a freshly constructed default
    # YANGDynClass instance (same declaration pyangbind emits at init time).
    self.__lsp_config_prority_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-prority-configured", rest_name="lsp-config-prority-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  def _get_lsp_config_setup_prority(self):
    """
    Getter method for lsp_config_setup_prority, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_setup_prority (uint8)
    YANG Description: LSP setup priority
    """
    # Returns the YANGDynClass-wrapped uint8 leaf value (range 0..255).
    return self.__lsp_config_setup_prority
def _set_lsp_config_setup_prority(self, v, load=False):
"""
Setter method for lsp_config_setup_prority, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_setup_prority (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_setup_prority is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_setup_prority() directly.
YANG Description: LSP setup priority
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-setup-prority", rest_name="lsp-config-setup-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_setup_prority must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-setup-prority", rest_name="lsp-config-setup-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)""",
})
self.__lsp_config_setup_prority = t
if hasattr(self, '_set'):
self._set()
  def _unset_lsp_config_setup_prority(self):
    # Reset the leaf by replacing it with a freshly constructed default
    # YANGDynClass instance (same declaration pyangbind emits at init time).
    self.__lsp_config_setup_prority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-setup-prority", rest_name="lsp-config-setup-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
  def _get_lsp_config_holding_prority(self):
    """
    Getter method for lsp_config_holding_prority, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_holding_prority (uint8)
    YANG Description: LSP holding priority
    """
    # Returns the YANGDynClass-wrapped uint8 leaf value (range 0..255).
    return self.__lsp_config_holding_prority
def _set_lsp_config_holding_prority(self, v, load=False):
"""
Setter method for lsp_config_holding_prority, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_holding_prority (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_holding_prority is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_holding_prority() directly.
YANG Description: LSP holding priority
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-holding-prority", rest_name="lsp-config-holding-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_holding_prority must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-holding-prority", rest_name="lsp-config-holding-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)""",
})
self.__lsp_config_holding_prority = t
if hasattr(self, '_set'):
self._set()
  def _unset_lsp_config_holding_prority(self):
    # Reset the leaf by replacing it with a freshly constructed default
    # YANGDynClass instance (same declaration pyangbind emits at init time).
    self.__lsp_config_holding_prority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-holding-prority", rest_name="lsp-config-holding-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
  def _get_lsp_config_hop_limit_configured(self):
    """
    Getter method for lsp_config_hop_limit_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_hop_limit_configured (boolean)
    YANG Description: LSP hop limit configured
    """
    # Returns the YANGDynClass-wrapped leaf value held on this instance.
    return self.__lsp_config_hop_limit_configured
def _set_lsp_config_hop_limit_configured(self, v, load=False):
"""
Setter method for lsp_config_hop_limit_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_hop_limit_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_hop_limit_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_hop_limit_configured() directly.
YANG Description: LSP hop limit configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-hop-limit-configured", rest_name="lsp-config-hop-limit-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_hop_limit_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-hop-limit-configured", rest_name="lsp-config-hop-limit-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_config_hop_limit_configured = t
if hasattr(self, '_set'):
self._set()
  def _unset_lsp_config_hop_limit_configured(self):
    # Reset the leaf by replacing it with a freshly constructed default
    # YANGDynClass instance (same declaration pyangbind emits at init time).
    self.__lsp_config_hop_limit_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-hop-limit-configured", rest_name="lsp-config-hop-limit-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  def _get_lsp_config_hop_limit(self):
    """
    Getter method for lsp_config_hop_limit, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_hop_limit (uint8)
    YANG Description: LSP hop limit
    """
    # Returns the YANGDynClass-wrapped uint8 leaf value (range 0..255).
    return self.__lsp_config_hop_limit
def _set_lsp_config_hop_limit(self, v, load=False):
"""
Setter method for lsp_config_hop_limit, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_hop_limit (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_hop_limit is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_hop_limit() directly.
YANG Description: LSP hop limit
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-hop-limit", rest_name="lsp-config-hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_hop_limit must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-hop-limit", rest_name="lsp-config-hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)""",
})
self.__lsp_config_hop_limit = t
if hasattr(self, '_set'):
self._set()
  def _unset_lsp_config_hop_limit(self):
    # Reset the leaf by replacing it with a freshly constructed default
    # YANGDynClass instance (same declaration pyangbind emits at init time).
    self.__lsp_config_hop_limit = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-config-hop-limit", rest_name="lsp-config-hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
  def _get_lsp_config_traffic_eng_rate_configured(self):
    """
    Getter method for lsp_config_traffic_eng_rate_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_traffic_eng_rate_configured (boolean)
    YANG Description: LSP traffic engineering rates configured
    """
    # Returns the YANGDynClass-wrapped leaf value held on this instance.
    return self.__lsp_config_traffic_eng_rate_configured
def _set_lsp_config_traffic_eng_rate_configured(self, v, load=False):
"""
Setter method for lsp_config_traffic_eng_rate_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_traffic_eng_rate_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_traffic_eng_rate_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_traffic_eng_rate_configured() directly.
YANG Description: LSP traffic engineering rates configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-traffic-eng-rate-configured", rest_name="lsp-config-traffic-eng-rate-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_traffic_eng_rate_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-traffic-eng-rate-configured", rest_name="lsp-config-traffic-eng-rate-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_config_traffic_eng_rate_configured = t
if hasattr(self, '_set'):
self._set()
  def _unset_lsp_config_traffic_eng_rate_configured(self):
    # Reset the leaf by replacing it with a freshly constructed default
    # YANGDynClass instance (same declaration pyangbind emits at init time).
    self.__lsp_config_traffic_eng_rate_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-traffic-eng-rate-configured", rest_name="lsp-config-traffic-eng-rate-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  def _get_lsp_config_traffic_eng_mean_rate(self):
    """
    Getter method for lsp_config_traffic_eng_mean_rate, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_traffic_eng_mean_rate (uint32)
    YANG Description: LSP traffic engineering mean rate
    """
    # Returns the YANGDynClass-wrapped uint32 leaf value (range 0..4294967295).
    return self.__lsp_config_traffic_eng_mean_rate
def _set_lsp_config_traffic_eng_mean_rate(self, v, load=False):
"""
Setter method for lsp_config_traffic_eng_mean_rate, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_traffic_eng_mean_rate (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_traffic_eng_mean_rate is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_traffic_eng_mean_rate() directly.
YANG Description: LSP traffic engineering mean rate
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-traffic-eng-mean-rate", rest_name="lsp-config-traffic-eng-mean-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_traffic_eng_mean_rate must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-traffic-eng-mean-rate", rest_name="lsp-config-traffic-eng-mean-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__lsp_config_traffic_eng_mean_rate = t
if hasattr(self, '_set'):
self._set()
  def _unset_lsp_config_traffic_eng_mean_rate(self):
    # Reset the leaf by replacing it with a freshly constructed default
    # YANGDynClass instance (same declaration pyangbind emits at init time).
    self.__lsp_config_traffic_eng_mean_rate = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-traffic-eng-mean-rate", rest_name="lsp-config-traffic-eng-mean-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
  def _get_lsp_config_traffic_eng_max_rate(self):
    """
    Getter method for lsp_config_traffic_eng_max_rate, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_traffic_eng_max_rate (uint32)
    YANG Description: LSP traffic engineering max rate
    """
    # Returns the YANGDynClass-wrapped uint32 leaf value (range 0..4294967295).
    return self.__lsp_config_traffic_eng_max_rate
def _set_lsp_config_traffic_eng_max_rate(self, v, load=False):
"""
Setter method for lsp_config_traffic_eng_max_rate, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_traffic_eng_max_rate (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_traffic_eng_max_rate is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_traffic_eng_max_rate() directly.
YANG Description: LSP traffic engineering max rate
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-traffic-eng-max-rate", rest_name="lsp-config-traffic-eng-max-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_traffic_eng_max_rate must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-traffic-eng-max-rate", rest_name="lsp-config-traffic-eng-max-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__lsp_config_traffic_eng_max_rate = t
if hasattr(self, '_set'):
self._set()
  def _unset_lsp_config_traffic_eng_max_rate(self):
    # Reset the leaf by replacing it with a freshly constructed default
    # YANGDynClass instance (same declaration pyangbind emits at init time).
    self.__lsp_config_traffic_eng_max_rate = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-traffic-eng-max-rate", rest_name="lsp-config-traffic-eng-max-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
  def _get_lsp_config_traffic_eng_max_burst(self):
    """
    Getter method for lsp_config_traffic_eng_max_burst, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_traffic_eng_max_burst (uint32)
    YANG Description: LSP traffic engineering max-burst
    """
    # Returns the YANGDynClass-wrapped uint32 leaf value (range 0..4294967295).
    return self.__lsp_config_traffic_eng_max_burst
def _set_lsp_config_traffic_eng_max_burst(self, v, load=False):
"""
Setter method for lsp_config_traffic_eng_max_burst, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_traffic_eng_max_burst (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_traffic_eng_max_burst is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_traffic_eng_max_burst() directly.
YANG Description: LSP traffic engineering max-burst
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-traffic-eng-max-burst", rest_name="lsp-config-traffic-eng-max-burst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_traffic_eng_max_burst must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-traffic-eng-max-burst", rest_name="lsp-config-traffic-eng-max-burst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__lsp_config_traffic_eng_max_burst = t
if hasattr(self, '_set'):
self._set()
  def _unset_lsp_config_traffic_eng_max_burst(self):
    # Reset the leaf by replacing it with a freshly constructed default
    # YANGDynClass instance (same declaration pyangbind emits at init time).
    self.__lsp_config_traffic_eng_max_burst = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-config-traffic-eng-max-burst", rest_name="lsp-config-traffic-eng-max-burst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
  def _get_lsp_config_admin_group_configured(self):
    """
    Getter method for lsp_config_admin_group_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_admin_group_configured (boolean)
    YANG Description: LSP admin group configured
    """
    # Returns the YANGDynClass-wrapped leaf value held on this instance.
    return self.__lsp_config_admin_group_configured
def _set_lsp_config_admin_group_configured(self, v, load=False):
"""
Setter method for lsp_config_admin_group_configured, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_admin_group_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_config_admin_group_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_config_admin_group_configured() directly.
YANG Description: LSP admin group configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-config-admin-group-configured", rest_name="lsp-config-admin-group-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_config_admin_group_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-admin-group-configured", rest_name="lsp-config-admin-group-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_config_admin_group_configured = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_config_admin_group_configured(self):
  # Reset lsp_config_admin_group_configured to a fresh default-initialised
  # instance of its generated boolean type.
  self.__lsp_config_admin_group_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-config-admin-group-configured", rest_name="lsp-config-admin-group-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_config_admin_groups(self):
  """
  Getter method for lsp_config_admin_groups, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_admin_groups (container)
  """
  current = self.__lsp_config_admin_groups
  return current
def _set_lsp_config_admin_groups(self, v, load=False):
  """
  Setter method for lsp_config_admin_groups, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_config_admin_groups (container)

  If this variable is read-only (config: false) in the
  source YANG file, then _set_lsp_config_admin_groups is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_lsp_config_admin_groups() directly.

  Raises:
    ValueError: if ``v`` cannot be coerced to the generated container type.
  """
  # If the incoming value carries a _utype converter, apply it to obtain
  # the underlying native value before revalidation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value in the generated dynamic type for this YANG
    # container (non-presence: presence=False).
    t = YANGDynClass(v,base=lsp_config_admin_groups.lsp_config_admin_groups, is_container='container', presence=False, yang_name="lsp-config-admin-groups", rest_name="lsp-config-admin-groups", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """lsp_config_admin_groups must be of a type compatible with container""",
      'defined-type': "container",
      'generated-type': """YANGDynClass(base=lsp_config_admin_groups.lsp_config_admin_groups, is_container='container', presence=False, yang_name="lsp-config-admin-groups", rest_name="lsp-config-admin-groups", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
    })
  self.__lsp_config_admin_groups = t
  # Invoke the change-notification hook when the object defines one.
  if hasattr(self, '_set'):
    self._set()
def _unset_lsp_config_admin_groups(self):
  # Reset lsp_config_admin_groups to a fresh default-initialised instance
  # of its generated container type.
  self.__lsp_config_admin_groups = YANGDynClass(base=lsp_config_admin_groups.lsp_config_admin_groups, is_container='container', presence=False, yang_name="lsp-config-admin-groups", rest_name="lsp-config-admin-groups", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_lsp_path_computed_by_cspf(self):
  """
  Getter method for lsp_path_computed_by_cspf, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_path_computed_by_cspf (boolean)

  YANG Description: LSP path computation by CSPF
  """
  current = self.__lsp_path_computed_by_cspf
  return current
def _set_lsp_path_computed_by_cspf(self, v, load=False):
  """
  Setter method for lsp_path_computed_by_cspf, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_path_computed_by_cspf (boolean)

  If this variable is read-only (config: false) in the
  source YANG file, then _set_lsp_path_computed_by_cspf is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_lsp_path_computed_by_cspf() directly.

  YANG Description: LSP path computation by CSPF

  Raises:
    ValueError: if ``v`` cannot be coerced to the generated boolean type.
  """
  # If the incoming value carries a _utype converter, apply it to obtain
  # the underlying native value before revalidation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value in the generated dynamic type for this YANG leaf.
    t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-path-computed-by-cspf", rest_name="lsp-path-computed-by-cspf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """lsp_path_computed_by_cspf must be of a type compatible with boolean""",
      'defined-type': "boolean",
      'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-path-computed-by-cspf", rest_name="lsp-path-computed-by-cspf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
    })
  self.__lsp_path_computed_by_cspf = t
  # Invoke the change-notification hook when the object defines one.
  if hasattr(self, '_set'):
    self._set()
def _unset_lsp_path_computed_by_cspf(self):
  # Reset lsp_path_computed_by_cspf to a fresh default-initialised
  # instance of its generated boolean type.
  self.__lsp_path_computed_by_cspf = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-path-computed-by-cspf", rest_name="lsp-path-computed-by-cspf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_path_computed_by_interface_constraint(self):
  """
  Getter method for lsp_path_computed_by_interface_constraint, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_path_computed_by_interface_constraint (boolean)

  YANG Description: LSP path computation by CSPF interface constraint
  """
  current = self.__lsp_path_computed_by_interface_constraint
  return current
def _set_lsp_path_computed_by_interface_constraint(self, v, load=False):
  """
  Setter method for lsp_path_computed_by_interface_constraint, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_path_computed_by_interface_constraint (boolean)

  If this variable is read-only (config: false) in the
  source YANG file, then _set_lsp_path_computed_by_interface_constraint is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_lsp_path_computed_by_interface_constraint() directly.

  YANG Description: LSP path computation by CSPF interface constraint

  Raises:
    ValueError: if ``v`` cannot be coerced to the generated boolean type.
  """
  # If the incoming value carries a _utype converter, apply it to obtain
  # the underlying native value before revalidation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value in the generated dynamic type for this YANG leaf.
    t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-path-computed-by-interface-constraint", rest_name="lsp-path-computed-by-interface-constraint", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """lsp_path_computed_by_interface_constraint must be of a type compatible with boolean""",
      'defined-type': "boolean",
      'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-path-computed-by-interface-constraint", rest_name="lsp-path-computed-by-interface-constraint", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
    })
  self.__lsp_path_computed_by_interface_constraint = t
  # Invoke the change-notification hook when the object defines one.
  if hasattr(self, '_set'):
    self._set()
def _unset_lsp_path_computed_by_interface_constraint(self):
  # Reset lsp_path_computed_by_interface_constraint to a fresh
  # default-initialised instance of its generated boolean type.
  self.__lsp_path_computed_by_interface_constraint = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-path-computed-by-interface-constraint", rest_name="lsp-path-computed-by-interface-constraint", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_cspf_computation_mode_default(self):
  """
  Getter method for lsp_cspf_computation_mode_default, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_computation_mode_default (boolean)

  YANG Description: LSP path computation mode default
  """
  current = self.__lsp_cspf_computation_mode_default
  return current
def _set_lsp_cspf_computation_mode_default(self, v, load=False):
  """
  Setter method for lsp_cspf_computation_mode_default, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_computation_mode_default (boolean)

  If this variable is read-only (config: false) in the
  source YANG file, then _set_lsp_cspf_computation_mode_default is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_lsp_cspf_computation_mode_default() directly.

  YANG Description: LSP path computation mode default

  Raises:
    ValueError: if ``v`` cannot be coerced to the generated boolean type.
  """
  # If the incoming value carries a _utype converter, apply it to obtain
  # the underlying native value before revalidation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value in the generated dynamic type for this YANG leaf.
    t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-cspf-computation-mode-default", rest_name="lsp-cspf-computation-mode-default", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """lsp_cspf_computation_mode_default must be of a type compatible with boolean""",
      'defined-type': "boolean",
      'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-computation-mode-default", rest_name="lsp-cspf-computation-mode-default", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
    })
  self.__lsp_cspf_computation_mode_default = t
  # Invoke the change-notification hook when the object defines one.
  if hasattr(self, '_set'):
    self._set()
def _unset_lsp_cspf_computation_mode_default(self):
  # Reset lsp_cspf_computation_mode_default to a fresh default-initialised
  # instance of its generated boolean type.
  self.__lsp_cspf_computation_mode_default = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-computation-mode-default", rest_name="lsp-cspf-computation-mode-default", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_cspf_computation_mode_use_bypass_metric(self):
  """
  Getter method for lsp_cspf_computation_mode_use_bypass_metric, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_computation_mode_use_bypass_metric (boolean)

  YANG Description: LSP path computation mode is use bypass metric
  """
  current = self.__lsp_cspf_computation_mode_use_bypass_metric
  return current
def _set_lsp_cspf_computation_mode_use_bypass_metric(self, v, load=False):
  """
  Setter method for lsp_cspf_computation_mode_use_bypass_metric, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_computation_mode_use_bypass_metric (boolean)

  If this variable is read-only (config: false) in the
  source YANG file, then _set_lsp_cspf_computation_mode_use_bypass_metric is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_lsp_cspf_computation_mode_use_bypass_metric() directly.

  YANG Description: LSP path computation mode is use bypass metric

  Raises:
    ValueError: if ``v`` cannot be coerced to the generated boolean type.
  """
  # If the incoming value carries a _utype converter, apply it to obtain
  # the underlying native value before revalidation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value in the generated dynamic type for this YANG leaf.
    t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-cspf-computation-mode-use-bypass-metric", rest_name="lsp-cspf-computation-mode-use-bypass-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """lsp_cspf_computation_mode_use_bypass_metric must be of a type compatible with boolean""",
      'defined-type': "boolean",
      'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-computation-mode-use-bypass-metric", rest_name="lsp-cspf-computation-mode-use-bypass-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
    })
  self.__lsp_cspf_computation_mode_use_bypass_metric = t
  # Invoke the change-notification hook when the object defines one.
  if hasattr(self, '_set'):
    self._set()
def _unset_lsp_cspf_computation_mode_use_bypass_metric(self):
  # Reset lsp_cspf_computation_mode_use_bypass_metric to a fresh
  # default-initialised instance of its generated boolean type.
  self.__lsp_cspf_computation_mode_use_bypass_metric = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-computation-mode-use-bypass-metric", rest_name="lsp-cspf-computation-mode-use-bypass-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_cspf_computation_mode_use_bypass_liberal(self):
  """
  Getter method for lsp_cspf_computation_mode_use_bypass_liberal, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_computation_mode_use_bypass_liberal (boolean)

  YANG Description: LSP path computation mode is use bypass liberal
  """
  current = self.__lsp_cspf_computation_mode_use_bypass_liberal
  return current
def _set_lsp_cspf_computation_mode_use_bypass_liberal(self, v, load=False):
  """
  Setter method for lsp_cspf_computation_mode_use_bypass_liberal, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_computation_mode_use_bypass_liberal (boolean)

  If this variable is read-only (config: false) in the
  source YANG file, then _set_lsp_cspf_computation_mode_use_bypass_liberal is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_lsp_cspf_computation_mode_use_bypass_liberal() directly.

  YANG Description: LSP path computation mode is use bypass liberal

  Raises:
    ValueError: if ``v`` cannot be coerced to the generated boolean type.
  """
  # If the incoming value carries a _utype converter, apply it to obtain
  # the underlying native value before revalidation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value in the generated dynamic type for this YANG leaf.
    t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-cspf-computation-mode-use-bypass-liberal", rest_name="lsp-cspf-computation-mode-use-bypass-liberal", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """lsp_cspf_computation_mode_use_bypass_liberal must be of a type compatible with boolean""",
      'defined-type': "boolean",
      'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-computation-mode-use-bypass-liberal", rest_name="lsp-cspf-computation-mode-use-bypass-liberal", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
    })
  self.__lsp_cspf_computation_mode_use_bypass_liberal = t
  # Invoke the change-notification hook when the object defines one.
  if hasattr(self, '_set'):
    self._set()
def _unset_lsp_cspf_computation_mode_use_bypass_liberal(self):
  # Reset lsp_cspf_computation_mode_use_bypass_liberal to a fresh
  # default-initialised instance of its generated boolean type.
  self.__lsp_cspf_computation_mode_use_bypass_liberal = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-computation-mode-use-bypass-liberal", rest_name="lsp-cspf-computation-mode-use-bypass-liberal", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_cspf_group_computation_mode_default(self):
  """
  Getter method for lsp_cspf_group_computation_mode_default, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_group_computation_mode_default (boolean)

  YANG Description: LSP path computation group mode default
  """
  current = self.__lsp_cspf_group_computation_mode_default
  return current
def _set_lsp_cspf_group_computation_mode_default(self, v, load=False):
  """
  Setter method for lsp_cspf_group_computation_mode_default, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_group_computation_mode_default (boolean)

  If this variable is read-only (config: false) in the
  source YANG file, then _set_lsp_cspf_group_computation_mode_default is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_lsp_cspf_group_computation_mode_default() directly.

  YANG Description: LSP path computation group mode default

  Raises:
    ValueError: if ``v`` cannot be coerced to the generated boolean type.
  """
  # If the incoming value carries a _utype converter, apply it to obtain
  # the underlying native value before revalidation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value in the generated dynamic type for this YANG leaf.
    t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-default", rest_name="lsp-cspf-group-computation-mode-default", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """lsp_cspf_group_computation_mode_default must be of a type compatible with boolean""",
      'defined-type': "boolean",
      'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-default", rest_name="lsp-cspf-group-computation-mode-default", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
    })
  self.__lsp_cspf_group_computation_mode_default = t
  # Invoke the change-notification hook when the object defines one.
  if hasattr(self, '_set'):
    self._set()
def _unset_lsp_cspf_group_computation_mode_default(self):
  # Reset lsp_cspf_group_computation_mode_default to a fresh
  # default-initialised instance of its generated boolean type.
  self.__lsp_cspf_group_computation_mode_default = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-default", rest_name="lsp-cspf-group-computation-mode-default", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_cspf_group_computation_mode_add_penalty(self):
  """
  Getter method for lsp_cspf_group_computation_mode_add_penalty, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_group_computation_mode_add_penalty (boolean)

  YANG Description: LSP path computation group mode is add penalty
  """
  current = self.__lsp_cspf_group_computation_mode_add_penalty
  return current
def _set_lsp_cspf_group_computation_mode_add_penalty(self, v, load=False):
  """
  Setter method for lsp_cspf_group_computation_mode_add_penalty, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_group_computation_mode_add_penalty (boolean)

  If this variable is read-only (config: false) in the
  source YANG file, then _set_lsp_cspf_group_computation_mode_add_penalty is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_lsp_cspf_group_computation_mode_add_penalty() directly.

  YANG Description: LSP path computation group mode is add penalty

  Raises:
    ValueError: if ``v`` cannot be coerced to the generated boolean type.
  """
  # If the incoming value carries a _utype converter, apply it to obtain
  # the underlying native value before revalidation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value in the generated dynamic type for this YANG leaf.
    t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-add-penalty", rest_name="lsp-cspf-group-computation-mode-add-penalty", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """lsp_cspf_group_computation_mode_add_penalty must be of a type compatible with boolean""",
      'defined-type': "boolean",
      'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-add-penalty", rest_name="lsp-cspf-group-computation-mode-add-penalty", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
    })
  self.__lsp_cspf_group_computation_mode_add_penalty = t
  # Invoke the change-notification hook when the object defines one.
  if hasattr(self, '_set'):
    self._set()
def _unset_lsp_cspf_group_computation_mode_add_penalty(self):
  # Reset lsp_cspf_group_computation_mode_add_penalty to a fresh
  # default-initialised instance of its generated boolean type.
  self.__lsp_cspf_group_computation_mode_add_penalty = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-add-penalty", rest_name="lsp-cspf-group-computation-mode-add-penalty", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_cspf_group_computation_mode_exclude_groups(self):
  """
  Getter method for lsp_cspf_group_computation_mode_exclude_groups, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_group_computation_mode_exclude_groups (boolean)

  YANG Description: LSP path computation group mode is exclude groups
  """
  current = self.__lsp_cspf_group_computation_mode_exclude_groups
  return current
def _set_lsp_cspf_group_computation_mode_exclude_groups(self, v, load=False):
  """
  Setter method for lsp_cspf_group_computation_mode_exclude_groups, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_group_computation_mode_exclude_groups (boolean)

  If this variable is read-only (config: false) in the
  source YANG file, then _set_lsp_cspf_group_computation_mode_exclude_groups is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_lsp_cspf_group_computation_mode_exclude_groups() directly.

  YANG Description: LSP path computation group mode is exclude groups

  Raises:
    ValueError: if ``v`` cannot be coerced to the generated boolean type.
  """
  # If the incoming value carries a _utype converter, apply it to obtain
  # the underlying native value before revalidation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value in the generated dynamic type for this YANG leaf.
    t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-exclude-groups", rest_name="lsp-cspf-group-computation-mode-exclude-groups", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """lsp_cspf_group_computation_mode_exclude_groups must be of a type compatible with boolean""",
      'defined-type': "boolean",
      'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-exclude-groups", rest_name="lsp-cspf-group-computation-mode-exclude-groups", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
    })
  self.__lsp_cspf_group_computation_mode_exclude_groups = t
  # Invoke the change-notification hook when the object defines one.
  if hasattr(self, '_set'):
    self._set()
def _unset_lsp_cspf_group_computation_mode_exclude_groups(self):
  # Reset lsp_cspf_group_computation_mode_exclude_groups to a fresh
  # default-initialised instance of its generated boolean type.
  self.__lsp_cspf_group_computation_mode_exclude_groups = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-exclude-groups", rest_name="lsp-cspf-group-computation-mode-exclude-groups", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_cspf_group_computation_mode_high_cost(self):
  """
  Getter method for lsp_cspf_group_computation_mode_high_cost, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_group_computation_mode_high_cost (boolean)

  YANG Description: LSP path computation group mode is exclude groups
  (NOTE(review): description text inherited from the source YANG module;
  it appears copy-pasted from the exclude-groups leaf — confirm against
  the brocade-mpls YANG module.)
  """
  current = self.__lsp_cspf_group_computation_mode_high_cost
  return current
def _set_lsp_cspf_group_computation_mode_high_cost(self, v, load=False):
  """
  Setter method for lsp_cspf_group_computation_mode_high_cost, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_group_computation_mode_high_cost (boolean)

  If this variable is read-only (config: false) in the
  source YANG file, then _set_lsp_cspf_group_computation_mode_high_cost is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_lsp_cspf_group_computation_mode_high_cost() directly.

  YANG Description: LSP path computation group mode is exclude groups
  (NOTE(review): description text inherited from the source YANG module;
  looks copy-pasted from the exclude-groups leaf — confirm.)

  Raises:
    ValueError: if ``v`` cannot be coerced to the generated boolean type.
  """
  # If the incoming value carries a _utype converter, apply it to obtain
  # the underlying native value before revalidation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value in the generated dynamic type for this YANG leaf.
    t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-high-cost", rest_name="lsp-cspf-group-computation-mode-high-cost", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """lsp_cspf_group_computation_mode_high_cost must be of a type compatible with boolean""",
      'defined-type': "boolean",
      'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-high-cost", rest_name="lsp-cspf-group-computation-mode-high-cost", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
    })
  self.__lsp_cspf_group_computation_mode_high_cost = t
  # Invoke the change-notification hook when the object defines one.
  if hasattr(self, '_set'):
    self._set()
def _unset_lsp_cspf_group_computation_mode_high_cost(self):
  # Reset lsp_cspf_group_computation_mode_high_cost to a fresh
  # default-initialised instance of its generated boolean type.
  self.__lsp_cspf_group_computation_mode_high_cost = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-group-computation-mode-high-cost", rest_name="lsp-cspf-group-computation-mode-high-cost", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_cspf_path_cost(self):
  """
  Getter method for lsp_cspf_path_cost, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_path_cost (uint32)

  YANG Description: LSP CSPF computed path cost
  """
  current = self.__lsp_cspf_path_cost
  return current
def _set_lsp_cspf_path_cost(self, v, load=False):
  """
  Setter method for lsp_cspf_path_cost, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_path_cost (uint32)

  If this variable is read-only (config: false) in the
  source YANG file, then _set_lsp_cspf_path_cost is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_lsp_cspf_path_cost() directly.

  YANG Description: LSP CSPF computed path cost

  Raises:
    ValueError: if ``v`` is not coercible to a uint32 (range 0..4294967295).
  """
  # If the incoming value carries a _utype converter, apply it to obtain
  # the underlying native value before revalidation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value in the generated dynamic type for this YANG leaf,
    # enforcing the uint32 range restriction.
    t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-cspf-path-cost", rest_name="lsp-cspf-path-cost", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """lsp_cspf_path_cost must be of a type compatible with uint32""",
      'defined-type': "uint32",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-cspf-path-cost", rest_name="lsp-cspf-path-cost", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
    })
  self.__lsp_cspf_path_cost = t
  # Invoke the change-notification hook when the object defines one.
  if hasattr(self, '_set'):
    self._set()
def _unset_lsp_cspf_path_cost(self):
  # Reset lsp_cspf_path_cost to a fresh default-initialised instance of
  # its generated uint32 type.
  self.__lsp_cspf_path_cost = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-cspf-path-cost", rest_name="lsp-cspf-path-cost", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_lsp_cspf_path_area(self):
  """
  Getter method for lsp_cspf_path_area, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_path_area (inet:ipv4-address)

  YANG Description: LSP CSPF paths area
  """
  current = self.__lsp_cspf_path_area
  return current
def _set_lsp_cspf_path_area(self, v, load=False):
  """
  Setter method for lsp_cspf_path_area, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_path_area (inet:ipv4-address)

  If this variable is read-only (config: false) in the
  source YANG file, then _set_lsp_cspf_path_area is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_lsp_cspf_path_area() directly.

  YANG Description: LSP CSPF paths area

  Raises:
    ValueError: if ``v`` does not match the ipv4-address pattern.
  """
  # If the incoming value carries a _utype converter, apply it to obtain
  # the underlying native value before revalidation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value in the generated dynamic type for this YANG leaf,
    # enforcing the dotted-quad ipv4-address pattern restriction.
    t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="lsp-cspf-path-area", rest_name="lsp-cspf-path-area", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """lsp_cspf_path_area must be of a type compatible with inet:ipv4-address""",
      'defined-type': "inet:ipv4-address",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="lsp-cspf-path-area", rest_name="lsp-cspf-path-area", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)""",
    })
  self.__lsp_cspf_path_area = t
  # Invoke the change-notification hook when the object defines one.
  if hasattr(self, '_set'):
    self._set()
def _unset_lsp_cspf_path_area(self):
    # Reset lsp_cspf_path_area to a freshly built default instance of its type.
    self.__lsp_cspf_path_area = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="lsp-cspf-path-area", rest_name="lsp-cspf-path-area", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
def _get_lsp_cspf_computation_error(self):
    """
    Read-accessor for the lsp_cspf_computation_error leaf (uint32), mapped from
    YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_computation_error

    YANG Description: LSP CSPF computation error code
    """
    current_value = self.__lsp_cspf_computation_error
    return current_value
def _set_lsp_cspf_computation_error(self, v, load=False):
    """
    Setter method for lsp_cspf_computation_error, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_computation_error (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_cspf_computation_error is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_cspf_computation_error() directly.

    YANG Description: LSP CSPF computation error code
    """
    # Values wrapped by a pyangbind union/typedef expose _utype; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated uint32 leaf type; out-of-range input raises.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-cspf-computation-error", rest_name="lsp-cspf-computation-error", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({
            'error-string': """lsp_cspf_computation_error must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-cspf-computation-error", rest_name="lsp-cspf-computation-error", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        })
    self.__lsp_cspf_computation_error = t
    # Propagate the change notification if the class provides a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_cspf_computation_error(self):
    # Reset lsp_cspf_computation_error to a default instance of its uint32 type.
    self.__lsp_cspf_computation_error = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-cspf-computation-error", rest_name="lsp-cspf-computation-error", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_lsp_cspf_path_hops(self):
    """
    Read-accessor for the lsp_cspf_path_hops container, mapped from YANG
    variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_path_hops
    """
    current_value = self.__lsp_cspf_path_hops
    return current_value
def _set_lsp_cspf_path_hops(self, v, load=False):
    """
    Setter method for lsp_cspf_path_hops, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_path_hops (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_cspf_path_hops is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_cspf_path_hops() directly.
    """
    # Values wrapped by a pyangbind union/typedef expose _utype; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated container class; incompatible input raises.
        t = YANGDynClass(v,base=lsp_cspf_path_hops.lsp_cspf_path_hops, is_container='container', presence=False, yang_name="lsp-cspf-path-hops", rest_name="lsp-cspf-path-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({
            'error-string': """lsp_cspf_path_hops must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=lsp_cspf_path_hops.lsp_cspf_path_hops, is_container='container', presence=False, yang_name="lsp-cspf-path-hops", rest_name="lsp-cspf-path-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
        })
    self.__lsp_cspf_path_hops = t
    # Propagate the change notification if the class provides a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_cspf_path_hops(self):
    # Reset lsp_cspf_path_hops to a default instance of its container class.
    self.__lsp_cspf_path_hops = YANGDynClass(base=lsp_cspf_path_hops.lsp_cspf_path_hops, is_container='container', presence=False, yang_name="lsp-cspf-path-hops", rest_name="lsp-cspf-path-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_lsp_cspf_exclude_hops_present(self):
    """
    Read-accessor for the lsp_cspf_exclude_hops_present leaf (boolean), mapped
    from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_exclude_hops_present

    YANG Description: LSP cspf exclude hops present
    """
    current_value = self.__lsp_cspf_exclude_hops_present
    return current_value
def _set_lsp_cspf_exclude_hops_present(self, v, load=False):
    """
    Setter method for lsp_cspf_exclude_hops_present, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_exclude_hops_present (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_cspf_exclude_hops_present is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_cspf_exclude_hops_present() directly.

    YANG Description: LSP cspf exclude hops present
    """
    # Values wrapped by a pyangbind union/typedef expose _utype; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated boolean leaf type; bad input raises.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-cspf-exclude-hops-present", rest_name="lsp-cspf-exclude-hops-present", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({
            'error-string': """lsp_cspf_exclude_hops_present must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-exclude-hops-present", rest_name="lsp-cspf-exclude-hops-present", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_cspf_exclude_hops_present = t
    # Propagate the change notification if the class provides a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_cspf_exclude_hops_present(self):
    # Reset lsp_cspf_exclude_hops_present to a default boolean leaf instance.
    self.__lsp_cspf_exclude_hops_present = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-cspf-exclude-hops-present", rest_name="lsp-cspf-exclude-hops-present", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_cspf_exclude_hops(self):
    """
    Read-accessor for the lsp_cspf_exclude_hops container, mapped from YANG
    variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_exclude_hops
    """
    current_value = self.__lsp_cspf_exclude_hops
    return current_value
def _set_lsp_cspf_exclude_hops(self, v, load=False):
    """
    Setter method for lsp_cspf_exclude_hops, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_cspf_exclude_hops (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_cspf_exclude_hops is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_cspf_exclude_hops() directly.
    """
    # Values wrapped by a pyangbind union/typedef expose _utype; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated container class; incompatible input raises.
        t = YANGDynClass(v,base=lsp_cspf_exclude_hops.lsp_cspf_exclude_hops, is_container='container', presence=False, yang_name="lsp-cspf-exclude-hops", rest_name="lsp-cspf-exclude-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({
            'error-string': """lsp_cspf_exclude_hops must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=lsp_cspf_exclude_hops.lsp_cspf_exclude_hops, is_container='container', presence=False, yang_name="lsp-cspf-exclude-hops", rest_name="lsp-cspf-exclude-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
        })
    self.__lsp_cspf_exclude_hops = t
    # Propagate the change notification if the class provides a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_cspf_exclude_hops(self):
    # Reset lsp_cspf_exclude_hops to a default instance of its container class.
    self.__lsp_cspf_exclude_hops = YANGDynClass(base=lsp_cspf_exclude_hops.lsp_cspf_exclude_hops, is_container='container', presence=False, yang_name="lsp-cspf-exclude-hops", rest_name="lsp-cspf-exclude-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_lsp_rsvp_session_present(self):
    """
    Read-accessor for the lsp_rsvp_session_present leaf (boolean), mapped from
    YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_present

    YANG Description: LSP RSVP session exists
    """
    current_value = self.__lsp_rsvp_session_present
    return current_value
def _set_lsp_rsvp_session_present(self, v, load=False):
    """
    Setter method for lsp_rsvp_session_present, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_present (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_rsvp_session_present is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_rsvp_session_present() directly.

    YANG Description: LSP RSVP session exists
    """
    # Values wrapped by a pyangbind union/typedef expose _utype; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated boolean leaf type; bad input raises.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-rsvp-session-present", rest_name="lsp-rsvp-session-present", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({
            'error-string': """lsp_rsvp_session_present must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-rsvp-session-present", rest_name="lsp-rsvp-session-present", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_rsvp_session_present = t
    # Propagate the change notification if the class provides a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_rsvp_session_present(self):
    # Reset lsp_rsvp_session_present to a default boolean leaf instance.
    self.__lsp_rsvp_session_present = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-rsvp-session-present", rest_name="lsp-rsvp-session-present", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_rsvp_session_state_up(self):
    """
    Read-accessor for the lsp_rsvp_session_state_up leaf (boolean), mapped from
    YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_state_up

    YANG Description: LSP RSVP session state
    """
    current_value = self.__lsp_rsvp_session_state_up
    return current_value
def _set_lsp_rsvp_session_state_up(self, v, load=False):
    """
    Setter method for lsp_rsvp_session_state_up, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_state_up (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_rsvp_session_state_up is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_rsvp_session_state_up() directly.

    YANG Description: LSP RSVP session state
    """
    # Values wrapped by a pyangbind union/typedef expose _utype; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated boolean leaf type; bad input raises.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-rsvp-session-state-up", rest_name="lsp-rsvp-session-state-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({
            'error-string': """lsp_rsvp_session_state_up must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-rsvp-session-state-up", rest_name="lsp-rsvp-session-state-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_rsvp_session_state_up = t
    # Propagate the change notification if the class provides a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_rsvp_session_state_up(self):
    # Reset lsp_rsvp_session_state_up to a default boolean leaf instance.
    self.__lsp_rsvp_session_state_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-rsvp-session-state-up", rest_name="lsp-rsvp-session-state-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_rsvp_session_state(self):
    """
    Read-accessor for the lsp_rsvp_session_state leaf (uint8), mapped from
    YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_state

    YANG Description: LSP RSVP session state
    """
    current_value = self.__lsp_rsvp_session_state
    return current_value
def _set_lsp_rsvp_session_state(self, v, load=False):
    """
    Setter method for lsp_rsvp_session_state, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_state (uint8)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_rsvp_session_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_rsvp_session_state() directly.

    YANG Description: LSP RSVP session state
    """
    # Values wrapped by a pyangbind union/typedef expose _utype; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated uint8 leaf type; out-of-range input raises.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-rsvp-session-state", rest_name="lsp-rsvp-session-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({
            'error-string': """lsp_rsvp_session_state must be of a type compatible with uint8""",
            'defined-type': "uint8",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-rsvp-session-state", rest_name="lsp-rsvp-session-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)""",
        })
    self.__lsp_rsvp_session_state = t
    # Propagate the change notification if the class provides a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_rsvp_session_state(self):
    # Reset lsp_rsvp_session_state to a default instance of its uint8 type.
    self.__lsp_rsvp_session_state = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="lsp-rsvp-session-state", rest_name="lsp-rsvp-session-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)
def _get_lsp_rsvp_session_path_error_code(self):
    """
    Read-accessor for the lsp_rsvp_session_path_error_code leaf (uint32),
    mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_path_error_code

    YANG Description: LSP RSVP session path error code
    """
    current_value = self.__lsp_rsvp_session_path_error_code
    return current_value
def _set_lsp_rsvp_session_path_error_code(self, v, load=False):
    """
    Setter method for lsp_rsvp_session_path_error_code, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_path_error_code (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_rsvp_session_path_error_code is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_rsvp_session_path_error_code() directly.

    YANG Description: LSP RSVP session path error code
    """
    # Values wrapped by a pyangbind union/typedef expose _utype; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated uint32 leaf type; out-of-range input raises.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-rsvp-session-path-error-code", rest_name="lsp-rsvp-session-path-error-code", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({
            'error-string': """lsp_rsvp_session_path_error_code must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-rsvp-session-path-error-code", rest_name="lsp-rsvp-session-path-error-code", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        })
    self.__lsp_rsvp_session_path_error_code = t
    # Propagate the change notification if the class provides a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_rsvp_session_path_error_code(self):
    # Reset lsp_rsvp_session_path_error_code to a default uint32 leaf instance.
    self.__lsp_rsvp_session_path_error_code = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-rsvp-session-path-error-code", rest_name="lsp-rsvp-session-path-error-code", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_lsp_rsvp_session_path_error_value(self):
    """
    Read-accessor for the lsp_rsvp_session_path_error_value leaf (uint32),
    mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_path_error_value

    YANG Description: LSP RSVP session path error value
    """
    current_value = self.__lsp_rsvp_session_path_error_value
    return current_value
def _set_lsp_rsvp_session_path_error_value(self, v, load=False):
    """
    Setter method for lsp_rsvp_session_path_error_value, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_path_error_value (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_rsvp_session_path_error_value is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_rsvp_session_path_error_value() directly.

    YANG Description: LSP RSVP session path error value
    """
    # Values wrapped by a pyangbind union/typedef expose _utype; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated uint32 leaf type; out-of-range input raises.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-rsvp-session-path-error-value", rest_name="lsp-rsvp-session-path-error-value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({
            'error-string': """lsp_rsvp_session_path_error_value must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-rsvp-session-path-error-value", rest_name="lsp-rsvp-session-path-error-value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        })
    self.__lsp_rsvp_session_path_error_value = t
    # Propagate the change notification if the class provides a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_rsvp_session_path_error_value(self):
    # Reset lsp_rsvp_session_path_error_value to a default uint32 leaf instance.
    self.__lsp_rsvp_session_path_error_value = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-rsvp-session-path-error-value", rest_name="lsp-rsvp-session-path-error-value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_lsp_rsvp_session_path_error_node_address(self):
    """
    Read-accessor for the lsp_rsvp_session_path_error_node_address leaf
    (inet:ipv4-address), mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_path_error_node_address

    YANG Description: LSP RSVP session path error node address
    """
    current_value = self.__lsp_rsvp_session_path_error_node_address
    return current_value
def _set_lsp_rsvp_session_path_error_node_address(self, v, load=False):
    """
    Setter method for lsp_rsvp_session_path_error_node_address, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_path_error_node_address (inet:ipv4-address)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_rsvp_session_path_error_node_address is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_rsvp_session_path_error_node_address() directly.

    YANG Description: LSP RSVP session path error node address
    """
    # Values wrapped by a pyangbind union/typedef expose _utype; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated IPv4-address leaf type; bad input raises.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="lsp-rsvp-session-path-error-node-address", rest_name="lsp-rsvp-session-path-error-node-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({
            'error-string': """lsp_rsvp_session_path_error_node_address must be of a type compatible with inet:ipv4-address""",
            'defined-type': "inet:ipv4-address",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="lsp-rsvp-session-path-error-node-address", rest_name="lsp-rsvp-session-path-error-node-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)""",
        })
    self.__lsp_rsvp_session_path_error_node_address = t
    # Propagate the change notification if the class provides a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_rsvp_session_path_error_node_address(self):
    # Reset lsp_rsvp_session_path_error_node_address to a default IPv4 leaf instance.
    self.__lsp_rsvp_session_path_error_node_address = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="lsp-rsvp-session-path-error-node-address", rest_name="lsp-rsvp-session-path-error-node-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
def _get_lsp_rsvp_session_rro_hops_present(self):
    """
    Read-accessor for the lsp_rsvp_session_rro_hops_present leaf (boolean),
    mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops_present

    YANG Description: LSP RSVP session RRO present
    """
    current_value = self.__lsp_rsvp_session_rro_hops_present
    return current_value
def _set_lsp_rsvp_session_rro_hops_present(self, v, load=False):
    """
    Setter method for lsp_rsvp_session_rro_hops_present, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops_present (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_rsvp_session_rro_hops_present is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_rsvp_session_rro_hops_present() directly.

    YANG Description: LSP RSVP session RRO present
    """
    # Values wrapped by a pyangbind union/typedef expose _utype; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated boolean leaf type; bad input raises.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-rsvp-session-rro-hops-present", rest_name="lsp-rsvp-session-rro-hops-present", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({
            'error-string': """lsp_rsvp_session_rro_hops_present must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-rsvp-session-rro-hops-present", rest_name="lsp-rsvp-session-rro-hops-present", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })
    self.__lsp_rsvp_session_rro_hops_present = t
    # Propagate the change notification if the class provides a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_rsvp_session_rro_hops_present(self):
    # Reset lsp_rsvp_session_rro_hops_present to a default boolean leaf instance.
    self.__lsp_rsvp_session_rro_hops_present = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-rsvp-session-rro-hops-present", rest_name="lsp-rsvp-session-rro-hops-present", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_rsvp_session_rro_hops(self):
    """
    Read-accessor for the lsp_rsvp_session_rro_hops container, mapped from YANG
    variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops
    """
    current_value = self.__lsp_rsvp_session_rro_hops
    return current_value
def _set_lsp_rsvp_session_rro_hops(self, v, load=False):
    """
    Setter method for lsp_rsvp_session_rro_hops, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_rsvp_session_rro_hops is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_rsvp_session_rro_hops() directly.
    """
    # Values wrapped by a pyangbind union/typedef expose _utype; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the generated container class; incompatible input raises.
        t = YANGDynClass(v,base=lsp_rsvp_session_rro_hops.lsp_rsvp_session_rro_hops, is_container='container', presence=False, yang_name="lsp-rsvp-session-rro-hops", rest_name="lsp-rsvp-session-rro-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({
            'error-string': """lsp_rsvp_session_rro_hops must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=lsp_rsvp_session_rro_hops.lsp_rsvp_session_rro_hops, is_container='container', presence=False, yang_name="lsp-rsvp-session-rro-hops", rest_name="lsp-rsvp-session-rro-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
        })
    self.__lsp_rsvp_session_rro_hops = t
    # Propagate the change notification if the class provides a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_rsvp_session_rro_hops(self):
    # Reset lsp_rsvp_session_rro_hops to a default instance of its container class.
    self.__lsp_rsvp_session_rro_hops = YANGDynClass(base=lsp_rsvp_session_rro_hops.lsp_rsvp_session_rro_hops, is_container='container', presence=False, yang_name="lsp-rsvp-session-rro-hops", rest_name="lsp-rsvp-session-rro-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_lsp_maximum_bandwidth(self):
    """
    Getter method for lsp_maximum_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_maximum_bandwidth (uint32)

    YANG Description: LSP maximum bandwidth

    Returns the stored YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__lsp_maximum_bandwidth
def _set_lsp_maximum_bandwidth(self, v, load=False):
    """
    Setter method for lsp_maximum_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_maximum_bandwidth (uint32)

    YANG Description: LSP maximum bandwidth

    If this variable is read-only (config: false) in the source YANG file,
    this setter is considered private; backends populate the value by
    calling thisObj._set_lsp_maximum_bandwidth() directly.  Raises
    ValueError when ``v`` cannot be represented as a uint32.
    """
    # Union-typed wrappers expose _utype; normalise to the native type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        wrapped = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-maximum-bandwidth", rest_name="lsp-maximum-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        details = {
            'error-string': """lsp_maximum_bandwidth must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-maximum-bandwidth", rest_name="lsp-maximum-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        }
        raise ValueError(details)
    self.__lsp_maximum_bandwidth = wrapped
    # Notify the parent binding of the change when the hook is present.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_maximum_bandwidth(self):
    """Restore lsp-maximum-bandwidth to a default-constructed uint32 wrapper."""
    self.__lsp_maximum_bandwidth = YANGDynClass(
        base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32),
        is_leaf=True,
        yang_name="lsp-maximum-bandwidth",
        rest_name="lsp-maximum-bandwidth",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=False,
        namespace='urn:brocade.com:mgmt:brocade-mpls',
        defining_module='brocade-mpls',
        yang_type='uint32',
        is_config=True,
    )
def _get_lsp_unreserved_priority_0_bandwidth(self):
    """
    Getter method for lsp_unreserved_priority_0_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_0_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 0

    Returns the stored YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__lsp_unreserved_priority_0_bandwidth
def _set_lsp_unreserved_priority_0_bandwidth(self, v, load=False):
    """
    Setter method for lsp_unreserved_priority_0_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_0_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 0

    If this variable is read-only (config: false) in the source YANG file,
    this setter is considered private; backends populate the value by
    calling thisObj._set_lsp_unreserved_priority_0_bandwidth() directly.
    Raises ValueError when ``v`` cannot be represented as a uint32.
    """
    # Union-typed wrappers expose _utype; normalise to the native type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        wrapped = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-0-bandwidth", rest_name="lsp-unreserved-priority-0-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        details = {
            'error-string': """lsp_unreserved_priority_0_bandwidth must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-0-bandwidth", rest_name="lsp-unreserved-priority-0-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        }
        raise ValueError(details)
    self.__lsp_unreserved_priority_0_bandwidth = wrapped
    # Notify the parent binding of the change when the hook is present.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_unreserved_priority_0_bandwidth(self):
    """Restore lsp-unreserved-priority-0-bandwidth to a default-constructed uint32 wrapper."""
    self.__lsp_unreserved_priority_0_bandwidth = YANGDynClass(
        base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32),
        is_leaf=True,
        yang_name="lsp-unreserved-priority-0-bandwidth",
        rest_name="lsp-unreserved-priority-0-bandwidth",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=False,
        namespace='urn:brocade.com:mgmt:brocade-mpls',
        defining_module='brocade-mpls',
        yang_type='uint32',
        is_config=True,
    )
def _get_lsp_unreserved_priority_1_bandwidth(self):
    """
    Getter method for lsp_unreserved_priority_1_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_1_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 1

    Returns the stored YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__lsp_unreserved_priority_1_bandwidth
def _set_lsp_unreserved_priority_1_bandwidth(self, v, load=False):
    """
    Setter method for lsp_unreserved_priority_1_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_1_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 1

    If this variable is read-only (config: false) in the source YANG file,
    this setter is considered private; backends populate the value by
    calling thisObj._set_lsp_unreserved_priority_1_bandwidth() directly.
    Raises ValueError when ``v`` cannot be represented as a uint32.
    """
    # Union-typed wrappers expose _utype; normalise to the native type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        wrapped = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-1-bandwidth", rest_name="lsp-unreserved-priority-1-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        details = {
            'error-string': """lsp_unreserved_priority_1_bandwidth must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-1-bandwidth", rest_name="lsp-unreserved-priority-1-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        }
        raise ValueError(details)
    self.__lsp_unreserved_priority_1_bandwidth = wrapped
    # Notify the parent binding of the change when the hook is present.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_unreserved_priority_1_bandwidth(self):
    """Restore lsp-unreserved-priority-1-bandwidth to a default-constructed uint32 wrapper."""
    self.__lsp_unreserved_priority_1_bandwidth = YANGDynClass(
        base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32),
        is_leaf=True,
        yang_name="lsp-unreserved-priority-1-bandwidth",
        rest_name="lsp-unreserved-priority-1-bandwidth",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=False,
        namespace='urn:brocade.com:mgmt:brocade-mpls',
        defining_module='brocade-mpls',
        yang_type='uint32',
        is_config=True,
    )
def _get_lsp_unreserved_priority_2_bandwidth(self):
    """
    Getter method for lsp_unreserved_priority_2_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_2_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 2

    Returns the stored YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__lsp_unreserved_priority_2_bandwidth
def _set_lsp_unreserved_priority_2_bandwidth(self, v, load=False):
    """
    Setter method for lsp_unreserved_priority_2_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_2_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 2

    If this variable is read-only (config: false) in the source YANG file,
    this setter is considered private; backends populate the value by
    calling thisObj._set_lsp_unreserved_priority_2_bandwidth() directly.
    Raises ValueError when ``v`` cannot be represented as a uint32.
    """
    # Union-typed wrappers expose _utype; normalise to the native type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        wrapped = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-2-bandwidth", rest_name="lsp-unreserved-priority-2-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        details = {
            'error-string': """lsp_unreserved_priority_2_bandwidth must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-2-bandwidth", rest_name="lsp-unreserved-priority-2-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        }
        raise ValueError(details)
    self.__lsp_unreserved_priority_2_bandwidth = wrapped
    # Notify the parent binding of the change when the hook is present.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_unreserved_priority_2_bandwidth(self):
    """Restore lsp-unreserved-priority-2-bandwidth to a default-constructed uint32 wrapper."""
    self.__lsp_unreserved_priority_2_bandwidth = YANGDynClass(
        base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32),
        is_leaf=True,
        yang_name="lsp-unreserved-priority-2-bandwidth",
        rest_name="lsp-unreserved-priority-2-bandwidth",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=False,
        namespace='urn:brocade.com:mgmt:brocade-mpls',
        defining_module='brocade-mpls',
        yang_type='uint32',
        is_config=True,
    )
def _get_lsp_unreserved_priority_3_bandwidth(self):
    """
    Getter method for lsp_unreserved_priority_3_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_3_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 3

    Returns the stored YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__lsp_unreserved_priority_3_bandwidth
def _set_lsp_unreserved_priority_3_bandwidth(self, v, load=False):
    """
    Setter method for lsp_unreserved_priority_3_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_3_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 3

    If this variable is read-only (config: false) in the source YANG file,
    this setter is considered private; backends populate the value by
    calling thisObj._set_lsp_unreserved_priority_3_bandwidth() directly.
    Raises ValueError when ``v`` cannot be represented as a uint32.
    """
    # Union-typed wrappers expose _utype; normalise to the native type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        wrapped = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-3-bandwidth", rest_name="lsp-unreserved-priority-3-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        details = {
            'error-string': """lsp_unreserved_priority_3_bandwidth must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-3-bandwidth", rest_name="lsp-unreserved-priority-3-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        }
        raise ValueError(details)
    self.__lsp_unreserved_priority_3_bandwidth = wrapped
    # Notify the parent binding of the change when the hook is present.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_unreserved_priority_3_bandwidth(self):
    """Restore lsp-unreserved-priority-3-bandwidth to a default-constructed uint32 wrapper."""
    self.__lsp_unreserved_priority_3_bandwidth = YANGDynClass(
        base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32),
        is_leaf=True,
        yang_name="lsp-unreserved-priority-3-bandwidth",
        rest_name="lsp-unreserved-priority-3-bandwidth",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=False,
        namespace='urn:brocade.com:mgmt:brocade-mpls',
        defining_module='brocade-mpls',
        yang_type='uint32',
        is_config=True,
    )
def _get_lsp_unreserved_priority_4_bandwidth(self):
    """
    Getter method for lsp_unreserved_priority_4_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_4_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 4

    Returns the stored YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__lsp_unreserved_priority_4_bandwidth
def _set_lsp_unreserved_priority_4_bandwidth(self, v, load=False):
    """
    Setter method for lsp_unreserved_priority_4_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_4_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 4

    If this variable is read-only (config: false) in the source YANG file,
    this setter is considered private; backends populate the value by
    calling thisObj._set_lsp_unreserved_priority_4_bandwidth() directly.
    Raises ValueError when ``v`` cannot be represented as a uint32.
    """
    # Union-typed wrappers expose _utype; normalise to the native type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        wrapped = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-4-bandwidth", rest_name="lsp-unreserved-priority-4-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        details = {
            'error-string': """lsp_unreserved_priority_4_bandwidth must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-4-bandwidth", rest_name="lsp-unreserved-priority-4-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        }
        raise ValueError(details)
    self.__lsp_unreserved_priority_4_bandwidth = wrapped
    # Notify the parent binding of the change when the hook is present.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_unreserved_priority_4_bandwidth(self):
    """Restore lsp-unreserved-priority-4-bandwidth to a default-constructed uint32 wrapper."""
    self.__lsp_unreserved_priority_4_bandwidth = YANGDynClass(
        base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32),
        is_leaf=True,
        yang_name="lsp-unreserved-priority-4-bandwidth",
        rest_name="lsp-unreserved-priority-4-bandwidth",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=False,
        namespace='urn:brocade.com:mgmt:brocade-mpls',
        defining_module='brocade-mpls',
        yang_type='uint32',
        is_config=True,
    )
def _get_lsp_unreserved_priority_5_bandwidth(self):
    """
    Getter method for lsp_unreserved_priority_5_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_5_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 5

    Returns the stored YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__lsp_unreserved_priority_5_bandwidth
def _set_lsp_unreserved_priority_5_bandwidth(self, v, load=False):
    """
    Setter method for lsp_unreserved_priority_5_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_5_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 5

    If this variable is read-only (config: false) in the source YANG file,
    this setter is considered private; backends populate the value by
    calling thisObj._set_lsp_unreserved_priority_5_bandwidth() directly.
    Raises ValueError when ``v`` cannot be represented as a uint32.
    """
    # Union-typed wrappers expose _utype; normalise to the native type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        wrapped = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-5-bandwidth", rest_name="lsp-unreserved-priority-5-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        details = {
            'error-string': """lsp_unreserved_priority_5_bandwidth must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-5-bandwidth", rest_name="lsp-unreserved-priority-5-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        }
        raise ValueError(details)
    self.__lsp_unreserved_priority_5_bandwidth = wrapped
    # Notify the parent binding of the change when the hook is present.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_unreserved_priority_5_bandwidth(self):
    """Restore lsp-unreserved-priority-5-bandwidth to a default-constructed uint32 wrapper."""
    self.__lsp_unreserved_priority_5_bandwidth = YANGDynClass(
        base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32),
        is_leaf=True,
        yang_name="lsp-unreserved-priority-5-bandwidth",
        rest_name="lsp-unreserved-priority-5-bandwidth",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=False,
        namespace='urn:brocade.com:mgmt:brocade-mpls',
        defining_module='brocade-mpls',
        yang_type='uint32',
        is_config=True,
    )
def _get_lsp_unreserved_priority_6_bandwidth(self):
    """
    Getter method for lsp_unreserved_priority_6_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_6_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 6

    Returns the stored YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__lsp_unreserved_priority_6_bandwidth
def _set_lsp_unreserved_priority_6_bandwidth(self, v, load=False):
    """
    Setter method for lsp_unreserved_priority_6_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_6_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 6

    If this variable is read-only (config: false) in the source YANG file,
    this setter is considered private; backends populate the value by
    calling thisObj._set_lsp_unreserved_priority_6_bandwidth() directly.
    Raises ValueError when ``v`` cannot be represented as a uint32.
    """
    # Union-typed wrappers expose _utype; normalise to the native type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        wrapped = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-6-bandwidth", rest_name="lsp-unreserved-priority-6-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        details = {
            'error-string': """lsp_unreserved_priority_6_bandwidth must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-6-bandwidth", rest_name="lsp-unreserved-priority-6-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        }
        raise ValueError(details)
    self.__lsp_unreserved_priority_6_bandwidth = wrapped
    # Notify the parent binding of the change when the hook is present.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_unreserved_priority_6_bandwidth(self):
    """Restore lsp-unreserved-priority-6-bandwidth to a default-constructed uint32 wrapper."""
    self.__lsp_unreserved_priority_6_bandwidth = YANGDynClass(
        base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32),
        is_leaf=True,
        yang_name="lsp-unreserved-priority-6-bandwidth",
        rest_name="lsp-unreserved-priority-6-bandwidth",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=False,
        namespace='urn:brocade.com:mgmt:brocade-mpls',
        defining_module='brocade-mpls',
        yang_type='uint32',
        is_config=True,
    )
def _get_lsp_unreserved_priority_7_bandwidth(self):
    """
    Getter method for lsp_unreserved_priority_7_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_7_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 7

    Returns the stored YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__lsp_unreserved_priority_7_bandwidth
def _set_lsp_unreserved_priority_7_bandwidth(self, v, load=False):
    """
    Setter method for lsp_unreserved_priority_7_bandwidth, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_bypass_lsp_name_debug/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_instances_info/lsp_instances/lsp_unreserved_priority_7_bandwidth (uint32)

    YANG Description: Unreserved bandwidth at priority 7

    If this variable is read-only (config: false) in the source YANG file,
    this setter is considered private; backends populate the value by
    calling thisObj._set_lsp_unreserved_priority_7_bandwidth() directly.
    Raises ValueError when ``v`` cannot be represented as a uint32.
    """
    # Union-typed wrappers expose _utype; normalise to the native type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        wrapped = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-7-bandwidth", rest_name="lsp-unreserved-priority-7-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        details = {
            'error-string': """lsp_unreserved_priority_7_bandwidth must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-7-bandwidth", rest_name="lsp-unreserved-priority-7-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
        }
        raise ValueError(details)
    self.__lsp_unreserved_priority_7_bandwidth = wrapped
    # Notify the parent binding of the change when the hook is present.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_unreserved_priority_7_bandwidth(self):
    # Reset the leaf to a freshly-constructed (default/empty) instance of its
    # generated uint32 type. Auto-generated pyangbind code — keep arguments in
    # sync with the corresponding setter above.
    self.__lsp_unreserved_priority_7_bandwidth = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-unreserved-priority-7-bandwidth", rest_name="lsp-unreserved-priority-7-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
# Auto-generated pyangbind property table: each YANG leaf of this container is
# exposed as a Python property wired to its private _get_*/_set_* pair.
# NOTE(review): several names carry upstream generator typos ("persistenct",
# "prority", "prempt") — kept verbatim for compatibility with the YANG model.
lsp_instance_number = __builtin__.property(_get_lsp_instance_number, _set_lsp_instance_number)
lsp_current_instance = __builtin__.property(_get_lsp_current_instance, _set_lsp_current_instance)
lsp_new_instance = __builtin__.property(_get_lsp_new_instance, _set_lsp_new_instance)
lsp_old_instance = __builtin__.property(_get_lsp_old_instance, _set_lsp_old_instance)
lsp_is_primary = __builtin__.property(_get_lsp_is_primary, _set_lsp_is_primary)
lsp_is_current_secondary = __builtin__.property(_get_lsp_is_current_secondary, _set_lsp_is_current_secondary)
lsp_is_selected_secondary = __builtin__.property(_get_lsp_is_selected_secondary, _set_lsp_is_selected_secondary)
lsp_config_path_configured = __builtin__.property(_get_lsp_config_path_configured, _set_lsp_config_path_configured)
lsp_config_path = __builtin__.property(_get_lsp_config_path, _set_lsp_config_path)
lsp_config_reoptimize_timer_configured = __builtin__.property(_get_lsp_config_reoptimize_timer_configured, _set_lsp_config_reoptimize_timer_configured)
lsp_config_reoptimize_timer = __builtin__.property(_get_lsp_config_reoptimize_timer, _set_lsp_config_reoptimize_timer)
lsp_config_tspec_mtu_configured = __builtin__.property(_get_lsp_config_tspec_mtu_configured, _set_lsp_config_tspec_mtu_configured)
lsp_config_tspec_mtu = __builtin__.property(_get_lsp_config_tspec_mtu, _set_lsp_config_tspec_mtu)
lsp_config_cos_configured = __builtin__.property(_get_lsp_config_cos_configured, _set_lsp_config_cos_configured)
lsp_config_cos = __builtin__.property(_get_lsp_config_cos, _set_lsp_config_cos)
lsp_config_mtu_configured = __builtin__.property(_get_lsp_config_mtu_configured, _set_lsp_config_mtu_configured)
lsp_config_mtu = __builtin__.property(_get_lsp_config_mtu, _set_lsp_config_mtu)
lsp_config_tie_breaking_configured = __builtin__.property(_get_lsp_config_tie_breaking_configured, _set_lsp_config_tie_breaking_configured)
lsp_config_tie_break_random = __builtin__.property(_get_lsp_config_tie_break_random, _set_lsp_config_tie_break_random)
lsp_config_tie_break_least_fill = __builtin__.property(_get_lsp_config_tie_break_least_fill, _set_lsp_config_tie_break_least_fill)
lsp_config_tie_break_most_fill = __builtin__.property(_get_lsp_config_tie_break_most_fill, _set_lsp_config_tie_break_most_fill)
lsp_config_cspf_disabled = __builtin__.property(_get_lsp_config_cspf_disabled, _set_lsp_config_cspf_disabled)
lsp_config_hot_standby = __builtin__.property(_get_lsp_config_hot_standby, _set_lsp_config_hot_standby)
lsp_config_pinned = __builtin__.property(_get_lsp_config_pinned, _set_lsp_config_pinned)
lsp_config_persistenct = __builtin__.property(_get_lsp_config_persistenct, _set_lsp_config_persistenct)
lsp_config_frr_global_revertive = __builtin__.property(_get_lsp_config_frr_global_revertive, _set_lsp_config_frr_global_revertive)
lsp_config_frr_hold_time = __builtin__.property(_get_lsp_config_frr_hold_time, _set_lsp_config_frr_hold_time)
lsp_config_soft_prempt = __builtin__.property(_get_lsp_config_soft_prempt, _set_lsp_config_soft_prempt)
lsp_config_exclude_interface_change = __builtin__.property(_get_lsp_config_exclude_interface_change, _set_lsp_config_exclude_interface_change)
lsp_config_prority_configured = __builtin__.property(_get_lsp_config_prority_configured, _set_lsp_config_prority_configured)
lsp_config_setup_prority = __builtin__.property(_get_lsp_config_setup_prority, _set_lsp_config_setup_prority)
lsp_config_holding_prority = __builtin__.property(_get_lsp_config_holding_prority, _set_lsp_config_holding_prority)
lsp_config_hop_limit_configured = __builtin__.property(_get_lsp_config_hop_limit_configured, _set_lsp_config_hop_limit_configured)
lsp_config_hop_limit = __builtin__.property(_get_lsp_config_hop_limit, _set_lsp_config_hop_limit)
lsp_config_traffic_eng_rate_configured = __builtin__.property(_get_lsp_config_traffic_eng_rate_configured, _set_lsp_config_traffic_eng_rate_configured)
lsp_config_traffic_eng_mean_rate = __builtin__.property(_get_lsp_config_traffic_eng_mean_rate, _set_lsp_config_traffic_eng_mean_rate)
lsp_config_traffic_eng_max_rate = __builtin__.property(_get_lsp_config_traffic_eng_max_rate, _set_lsp_config_traffic_eng_max_rate)
lsp_config_traffic_eng_max_burst = __builtin__.property(_get_lsp_config_traffic_eng_max_burst, _set_lsp_config_traffic_eng_max_burst)
lsp_config_admin_group_configured = __builtin__.property(_get_lsp_config_admin_group_configured, _set_lsp_config_admin_group_configured)
lsp_config_admin_groups = __builtin__.property(_get_lsp_config_admin_groups, _set_lsp_config_admin_groups)
lsp_path_computed_by_cspf = __builtin__.property(_get_lsp_path_computed_by_cspf, _set_lsp_path_computed_by_cspf)
lsp_path_computed_by_interface_constraint = __builtin__.property(_get_lsp_path_computed_by_interface_constraint, _set_lsp_path_computed_by_interface_constraint)
lsp_cspf_computation_mode_default = __builtin__.property(_get_lsp_cspf_computation_mode_default, _set_lsp_cspf_computation_mode_default)
lsp_cspf_computation_mode_use_bypass_metric = __builtin__.property(_get_lsp_cspf_computation_mode_use_bypass_metric, _set_lsp_cspf_computation_mode_use_bypass_metric)
lsp_cspf_computation_mode_use_bypass_liberal = __builtin__.property(_get_lsp_cspf_computation_mode_use_bypass_liberal, _set_lsp_cspf_computation_mode_use_bypass_liberal)
lsp_cspf_group_computation_mode_default = __builtin__.property(_get_lsp_cspf_group_computation_mode_default, _set_lsp_cspf_group_computation_mode_default)
lsp_cspf_group_computation_mode_add_penalty = __builtin__.property(_get_lsp_cspf_group_computation_mode_add_penalty, _set_lsp_cspf_group_computation_mode_add_penalty)
lsp_cspf_group_computation_mode_exclude_groups = __builtin__.property(_get_lsp_cspf_group_computation_mode_exclude_groups, _set_lsp_cspf_group_computation_mode_exclude_groups)
lsp_cspf_group_computation_mode_high_cost = __builtin__.property(_get_lsp_cspf_group_computation_mode_high_cost, _set_lsp_cspf_group_computation_mode_high_cost)
lsp_cspf_path_cost = __builtin__.property(_get_lsp_cspf_path_cost, _set_lsp_cspf_path_cost)
lsp_cspf_path_area = __builtin__.property(_get_lsp_cspf_path_area, _set_lsp_cspf_path_area)
lsp_cspf_computation_error = __builtin__.property(_get_lsp_cspf_computation_error, _set_lsp_cspf_computation_error)
lsp_cspf_path_hops = __builtin__.property(_get_lsp_cspf_path_hops, _set_lsp_cspf_path_hops)
lsp_cspf_exclude_hops_present = __builtin__.property(_get_lsp_cspf_exclude_hops_present, _set_lsp_cspf_exclude_hops_present)
lsp_cspf_exclude_hops = __builtin__.property(_get_lsp_cspf_exclude_hops, _set_lsp_cspf_exclude_hops)
lsp_rsvp_session_present = __builtin__.property(_get_lsp_rsvp_session_present, _set_lsp_rsvp_session_present)
lsp_rsvp_session_state_up = __builtin__.property(_get_lsp_rsvp_session_state_up, _set_lsp_rsvp_session_state_up)
lsp_rsvp_session_state = __builtin__.property(_get_lsp_rsvp_session_state, _set_lsp_rsvp_session_state)
lsp_rsvp_session_path_error_code = __builtin__.property(_get_lsp_rsvp_session_path_error_code, _set_lsp_rsvp_session_path_error_code)
lsp_rsvp_session_path_error_value = __builtin__.property(_get_lsp_rsvp_session_path_error_value, _set_lsp_rsvp_session_path_error_value)
lsp_rsvp_session_path_error_node_address = __builtin__.property(_get_lsp_rsvp_session_path_error_node_address, _set_lsp_rsvp_session_path_error_node_address)
lsp_rsvp_session_rro_hops_present = __builtin__.property(_get_lsp_rsvp_session_rro_hops_present, _set_lsp_rsvp_session_rro_hops_present)
lsp_rsvp_session_rro_hops = __builtin__.property(_get_lsp_rsvp_session_rro_hops, _set_lsp_rsvp_session_rro_hops)
lsp_maximum_bandwidth = __builtin__.property(_get_lsp_maximum_bandwidth, _set_lsp_maximum_bandwidth)
lsp_unreserved_priority_0_bandwidth = __builtin__.property(_get_lsp_unreserved_priority_0_bandwidth, _set_lsp_unreserved_priority_0_bandwidth)
lsp_unreserved_priority_1_bandwidth = __builtin__.property(_get_lsp_unreserved_priority_1_bandwidth, _set_lsp_unreserved_priority_1_bandwidth)
lsp_unreserved_priority_2_bandwidth = __builtin__.property(_get_lsp_unreserved_priority_2_bandwidth, _set_lsp_unreserved_priority_2_bandwidth)
lsp_unreserved_priority_3_bandwidth = __builtin__.property(_get_lsp_unreserved_priority_3_bandwidth, _set_lsp_unreserved_priority_3_bandwidth)
lsp_unreserved_priority_4_bandwidth = __builtin__.property(_get_lsp_unreserved_priority_4_bandwidth, _set_lsp_unreserved_priority_4_bandwidth)
lsp_unreserved_priority_5_bandwidth = __builtin__.property(_get_lsp_unreserved_priority_5_bandwidth, _set_lsp_unreserved_priority_5_bandwidth)
lsp_unreserved_priority_6_bandwidth = __builtin__.property(_get_lsp_unreserved_priority_6_bandwidth, _set_lsp_unreserved_priority_6_bandwidth)
lsp_unreserved_priority_7_bandwidth = __builtin__.property(_get_lsp_unreserved_priority_7_bandwidth, _set_lsp_unreserved_priority_7_bandwidth)
# Registry used by pyangbind to enumerate this container's elements, in YANG
# model order (must mirror the property definitions above).
_pyangbind_elements = {
    'lsp_instance_number': lsp_instance_number,
    'lsp_current_instance': lsp_current_instance,
    'lsp_new_instance': lsp_new_instance,
    'lsp_old_instance': lsp_old_instance,
    'lsp_is_primary': lsp_is_primary,
    'lsp_is_current_secondary': lsp_is_current_secondary,
    'lsp_is_selected_secondary': lsp_is_selected_secondary,
    'lsp_config_path_configured': lsp_config_path_configured,
    'lsp_config_path': lsp_config_path,
    'lsp_config_reoptimize_timer_configured': lsp_config_reoptimize_timer_configured,
    'lsp_config_reoptimize_timer': lsp_config_reoptimize_timer,
    'lsp_config_tspec_mtu_configured': lsp_config_tspec_mtu_configured,
    'lsp_config_tspec_mtu': lsp_config_tspec_mtu,
    'lsp_config_cos_configured': lsp_config_cos_configured,
    'lsp_config_cos': lsp_config_cos,
    'lsp_config_mtu_configured': lsp_config_mtu_configured,
    'lsp_config_mtu': lsp_config_mtu,
    'lsp_config_tie_breaking_configured': lsp_config_tie_breaking_configured,
    'lsp_config_tie_break_random': lsp_config_tie_break_random,
    'lsp_config_tie_break_least_fill': lsp_config_tie_break_least_fill,
    'lsp_config_tie_break_most_fill': lsp_config_tie_break_most_fill,
    'lsp_config_cspf_disabled': lsp_config_cspf_disabled,
    'lsp_config_hot_standby': lsp_config_hot_standby,
    'lsp_config_pinned': lsp_config_pinned,
    'lsp_config_persistenct': lsp_config_persistenct,
    'lsp_config_frr_global_revertive': lsp_config_frr_global_revertive,
    'lsp_config_frr_hold_time': lsp_config_frr_hold_time,
    'lsp_config_soft_prempt': lsp_config_soft_prempt,
    'lsp_config_exclude_interface_change': lsp_config_exclude_interface_change,
    'lsp_config_prority_configured': lsp_config_prority_configured,
    'lsp_config_setup_prority': lsp_config_setup_prority,
    'lsp_config_holding_prority': lsp_config_holding_prority,
    'lsp_config_hop_limit_configured': lsp_config_hop_limit_configured,
    'lsp_config_hop_limit': lsp_config_hop_limit,
    'lsp_config_traffic_eng_rate_configured': lsp_config_traffic_eng_rate_configured,
    'lsp_config_traffic_eng_mean_rate': lsp_config_traffic_eng_mean_rate,
    'lsp_config_traffic_eng_max_rate': lsp_config_traffic_eng_max_rate,
    'lsp_config_traffic_eng_max_burst': lsp_config_traffic_eng_max_burst,
    'lsp_config_admin_group_configured': lsp_config_admin_group_configured,
    'lsp_config_admin_groups': lsp_config_admin_groups,
    'lsp_path_computed_by_cspf': lsp_path_computed_by_cspf,
    'lsp_path_computed_by_interface_constraint': lsp_path_computed_by_interface_constraint,
    'lsp_cspf_computation_mode_default': lsp_cspf_computation_mode_default,
    'lsp_cspf_computation_mode_use_bypass_metric': lsp_cspf_computation_mode_use_bypass_metric,
    'lsp_cspf_computation_mode_use_bypass_liberal': lsp_cspf_computation_mode_use_bypass_liberal,
    'lsp_cspf_group_computation_mode_default': lsp_cspf_group_computation_mode_default,
    'lsp_cspf_group_computation_mode_add_penalty': lsp_cspf_group_computation_mode_add_penalty,
    'lsp_cspf_group_computation_mode_exclude_groups': lsp_cspf_group_computation_mode_exclude_groups,
    'lsp_cspf_group_computation_mode_high_cost': lsp_cspf_group_computation_mode_high_cost,
    'lsp_cspf_path_cost': lsp_cspf_path_cost,
    'lsp_cspf_path_area': lsp_cspf_path_area,
    'lsp_cspf_computation_error': lsp_cspf_computation_error,
    'lsp_cspf_path_hops': lsp_cspf_path_hops,
    'lsp_cspf_exclude_hops_present': lsp_cspf_exclude_hops_present,
    'lsp_cspf_exclude_hops': lsp_cspf_exclude_hops,
    'lsp_rsvp_session_present': lsp_rsvp_session_present,
    'lsp_rsvp_session_state_up': lsp_rsvp_session_state_up,
    'lsp_rsvp_session_state': lsp_rsvp_session_state,
    'lsp_rsvp_session_path_error_code': lsp_rsvp_session_path_error_code,
    'lsp_rsvp_session_path_error_value': lsp_rsvp_session_path_error_value,
    'lsp_rsvp_session_path_error_node_address': lsp_rsvp_session_path_error_node_address,
    'lsp_rsvp_session_rro_hops_present': lsp_rsvp_session_rro_hops_present,
    'lsp_rsvp_session_rro_hops': lsp_rsvp_session_rro_hops,
    'lsp_maximum_bandwidth': lsp_maximum_bandwidth,
    'lsp_unreserved_priority_0_bandwidth': lsp_unreserved_priority_0_bandwidth,
    'lsp_unreserved_priority_1_bandwidth': lsp_unreserved_priority_1_bandwidth,
    'lsp_unreserved_priority_2_bandwidth': lsp_unreserved_priority_2_bandwidth,
    'lsp_unreserved_priority_3_bandwidth': lsp_unreserved_priority_3_bandwidth,
    'lsp_unreserved_priority_4_bandwidth': lsp_unreserved_priority_4_bandwidth,
    'lsp_unreserved_priority_5_bandwidth': lsp_unreserved_priority_5_bandwidth,
    'lsp_unreserved_priority_6_bandwidth': lsp_unreserved_priority_6_bandwidth,
    'lsp_unreserved_priority_7_bandwidth': lsp_unreserved_priority_7_bandwidth,
}
| 85.184147
| 4,512
| 0.78647
| 35,490
| 246,097
| 5.064835
| 0.008199
| 0.046715
| 0.045174
| 0.032044
| 0.984684
| 0.967928
| 0.942087
| 0.919589
| 0.898409
| 0.88572
| 0
| 0.009461
| 0.102346
| 246,097
| 2,888
| 4,513
| 85.213643
| 0.804223
| 0.254481
| 0
| 0.53496
| 0
| 0.051451
| 0.369424
| 0.2532
| 0
| 0
| 0
| 0
| 0
| 1
| 0.144459
| false
| 0.01715
| 0.007916
| 0
| 0.254617
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ac953a6585165eeb4dc5cc4e27598a050e3a399
| 19,293
|
py
|
Python
|
tests/testflows/rbac/tests/privileges/insert.py
|
liuyonghengheng/ClickHouse
|
34b2a460ba9149f986feb4edcb4583f2cff9ecc0
|
[
"Apache-2.0"
] | 1
|
2020-10-08T14:58:25.000Z
|
2020-10-08T14:58:25.000Z
|
tests/testflows/rbac/tests/privileges/insert.py
|
liuyonghengheng/ClickHouse
|
34b2a460ba9149f986feb4edcb4583f2cff9ecc0
|
[
"Apache-2.0"
] | null | null | null |
tests/testflows/rbac/tests/privileges/insert.py
|
liuyonghengheng/ClickHouse
|
34b2a460ba9149f986feb4edcb4583f2cff9ecc0
|
[
"Apache-2.0"
] | null | null | null |
from contextlib import contextmanager
import json
from testflows.core import *
from testflows.asserts import error
from rbac.requirements import *
from rbac.helper.common import *
import rbac.helper.errors as errors
def input_output_equality_check(node, input_columns, input_data, table_name):
    """Return True when the row stored in `table_name` matches `input_data`.

    `input_columns` is a comma-separated column list and `input_data` the
    matching comma-separated literal values (single quotes are stripped).
    The stored row is fetched as JSONEachRow and compared value-by-value
    as strings.
    """
    columns = input_columns.split(",")
    values = [item.strip("'") for item in input_data.split(",")]
    expected = dict(zip(columns, values))
    raw = node.query(f"select {input_columns} from {table_name} format JSONEachRow").output
    # Stringify stored values so numeric columns compare equal to their literals.
    actual = {key: str(value) for (key, value) in json.loads(raw).items()}
    return expected == actual
@TestScenario
def without_privilege(self, table_type, node=None):
    """Verify that INSERT on a table fails for a user holding no insert
    privilege on that table.
    """
    uname = f"user_{getuid()}"
    tname = f"table_{getuid()}"
    node = self.context.node if node is None else node
    with table(node, tname, table_type), user(node, uname):
        with When("I run INSERT without privilege"):
            exitcode, message = errors.not_enough_privileges(name=uname)
            node.query(
                f"INSERT INTO {tname} (d) VALUES ('2020-01-01')",
                settings=[("user", uname)],
                exitcode=exitcode,
                message=message,
            )
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Insert_Grant("1.0"),
)
def user_with_privilege(self, table_type, node=None):
    """Verify that a user granted INSERT on a table can insert into it."""
    uname = f"user_{getuid()}"
    tname = f"table_{getuid()}"
    node = self.context.node if node is None else node
    with table(node, tname, table_type), user(node, uname):
        with When("I grant insert privilege"):
            node.query(f"GRANT INSERT ON {tname} TO {uname}")
        with And("I use INSERT"):
            node.query(
                f"INSERT INTO {tname} (d) VALUES ('2020-01-01')",
                settings=[("user", uname)],
            )
        with Then("I check the insert functioned"):
            output = node.query(f"SELECT d FROM {tname} FORMAT JSONEachRow").output
            assert output == '{"d":"2020-01-01"}', error()
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Insert_Revoke("1.0"),
)
def user_with_revoked_privilege(self, table_type, node=None):
    """Verify that INSERT fails after the user's insert privilege on the
    table has been revoked.
    """
    uname = f"user_{getuid()}"
    tname = f"table_{getuid()}"
    node = self.context.node if node is None else node
    with table(node, tname, table_type), user(node, uname):
        with When("I grant insert privilege"):
            node.query(f"GRANT INSERT ON {tname} TO {uname}")
        with And("I revoke insert privilege"):
            node.query(f"REVOKE INSERT ON {tname} FROM {uname}")
        with And("I use INSERT"):
            exitcode, message = errors.not_enough_privileges(name=uname)
            node.query(
                f"INSERT INTO {tname} (d) VALUES ('2020-01-01')",
                settings=[("user", uname)],
                exitcode=exitcode,
                message=message,
            )
@TestScenario
def user_with_privilege_on_columns(self, table_type):
    """Run the per-column INSERT privilege outline for this table engine."""
    # Append the current table_type to every outline example row.
    rows = [tuple(list(example) + [table_type]) for example in user_column_privileges.examples]
    Scenario(
        run=user_column_privileges,
        examples=Examples(
            "grant_columns revoke_columns insert_columns_fail insert_columns_pass data_fail data_pass table_type",
            rows,
        ),
    )
@TestOutline
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Insert_Column("1.0"),
)
@Examples("grant_columns revoke_columns insert_columns_fail insert_columns_pass data_fail data_pass", [
    ("d", "d", "x", "d", '\'woo\'', '\'2020-01-01\''),
    ("d,a", "d", "x", "d", '\'woo\'', '\'2020-01-01\''),
    ("d,a,b", "d,a,b", "x", "d,b", '\'woo\'', '\'2020-01-01\',9'),
    ("d,a,b", "b", "y", "d,a,b", '9', '\'2020-01-01\',\'woo\',9')
])
def user_column_privileges(self, grant_columns, insert_columns_pass, data_fail, data_pass, table_type,
        revoke_columns=None, insert_columns_fail=None, node=None):
    """Verify per-column INSERT: inserts succeed on granted columns and fail
    on columns that were never granted or have been revoked.
    """
    uname = f"user_{getuid()}"
    tname = f"table_{getuid()}"
    node = self.context.node if node is None else node
    with table(node, tname, table_type), user(node, uname):
        with When("I grant insert privilege"):
            node.query(f"GRANT INSERT({grant_columns}) ON {tname} TO {uname}")
        if insert_columns_fail is not None:
            with And("I insert into a column without insert privilege"):
                exitcode, message = errors.not_enough_privileges(name=uname)
                node.query(
                    f"INSERT INTO {tname} ({insert_columns_fail}) VALUES ({data_fail})",
                    settings=[("user", uname)],
                    exitcode=exitcode,
                    message=message,
                )
        with And("I insert into granted column"):
            node.query(
                f"INSERT INTO {tname} ({insert_columns_pass}) VALUES ({data_pass})",
                settings=[("user", uname)],
            )
        with Then("I check the insert functioned"):
            assert input_output_equality_check(node, insert_columns_pass, data_pass, tname), error()
        if revoke_columns is not None:
            with When("I revoke insert privilege from columns"):
                node.query(f"REVOKE INSERT({revoke_columns}) ON {tname} FROM {uname}")
            with And("I insert into revoked columns"):
                exitcode, message = errors.not_enough_privileges(name=uname)
                node.query(
                    f"INSERT INTO {tname} ({insert_columns_pass}) VALUES ({data_pass})",
                    settings=[("user", uname)],
                    exitcode=exitcode,
                    message=message,
                )
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Insert_Grant("1.0"),
)
def role_with_privilege(self, table_type, node=None):
    """Verify that a user can insert into a table via a granted role that
    holds INSERT on that table.
    """
    uname = f"user_{getuid()}"
    rname = f"role_{getuid()}"
    tname = f"table_{getuid()}"
    node = self.context.node if node is None else node
    with table(node, tname, table_type), user(node, uname), role(node, rname):
        with When("I grant insert privilege to a role"):
            node.query(f"GRANT INSERT ON {tname} TO {rname}")
        with And("I grant the role to a user"):
            node.query(f"GRANT {rname} TO {uname}")
        with And("I insert into the table"):
            node.query(
                f"INSERT INTO {tname} (d) VALUES ('2020-01-01')",
                settings=[("user", uname)],
            )
        with Then("I check the data matches the input"):
            output = node.query(f"SELECT d FROM {tname} FORMAT JSONEachRow").output
            assert output == '{"d":"2020-01-01"}', error()
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Insert_Revoke("1.0"),
)
def role_with_revoked_privilege(self, table_type, node=None):
    """Verify that INSERT fails for a user whose role had its insert
    privilege on the table revoked.
    """
    uname = f"user_{getuid()}"
    rname = f"role_{getuid()}"
    tname = f"table_{getuid()}"
    node = self.context.node if node is None else node
    with table(node, tname, table_type), user(node, uname), role(node, rname):
        with When("I grant privilege to a role"):
            node.query(f"GRANT INSERT ON {tname} TO {rname}")
        with And("I grant the role to a user"):
            node.query(f"GRANT {rname} TO {uname}")
        with And("I revoke privilege from the role"):
            node.query(f"REVOKE INSERT ON {tname} FROM {rname}")
        with And("I insert into the table"):
            exitcode, message = errors.not_enough_privileges(name=uname)
            node.query(
                f"INSERT INTO {tname} (d) VALUES ('2020-01-01')",
                settings=[("user", uname)],
                exitcode=exitcode,
                message=message,
            )
@TestScenario
def user_with_revoked_role(self, table_type, node=None):
    """Verify that INSERT fails once the privileged role has been revoked
    from the user.
    """
    uname = f"user_{getuid()}"
    rname = f"role_{getuid()}"
    tname = f"table_{getuid()}"
    node = self.context.node if node is None else node
    with table(node, tname, table_type), user(node, uname), role(node, rname):
        with When("I grant privilege to a role"):
            node.query(f"GRANT INSERT ON {tname} TO {rname}")
        with And("I grant the role to a user"):
            node.query(f"GRANT {rname} TO {uname}")
        with And("I revoke the role from the user"):
            node.query(f"REVOKE {rname} FROM {uname}")
        with And("I insert into the table"):
            exitcode, message = errors.not_enough_privileges(name=uname)
            node.query(
                f"INSERT INTO {tname} (d) VALUES ('2020-01-01')",
                settings=[("user", uname)],
                exitcode=exitcode,
                message=message,
            )
@TestScenario
def role_with_privilege_on_columns(self, table_type):
    """Run the per-column role INSERT privilege outline for this table engine."""
    # Append the current table_type to every outline example row.
    rows = [tuple(list(example) + [table_type]) for example in role_column_privileges.examples]
    Scenario(
        run=role_column_privileges,
        examples=Examples(
            "grant_columns revoke_columns insert_columns_fail insert_columns_pass data_fail data_pass table_type",
            rows,
        ),
    )
@TestOutline
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Insert_Column("1.0"),
)
@Examples("grant_columns revoke_columns insert_columns_fail insert_columns_pass data_fail data_pass", [
    ("d", "d", "x", "d", '\'woo\'', '\'2020-01-01\''),
    ("d,a", "d", "x", "d", '\'woo\'', '\'2020-01-01\''),
    ("d,a,b", "d,a,b", "x", "d,b", '\'woo\'', '\'2020-01-01\',9'),
    ("d,a,b", "b", "y", "d,a,b", '9', '\'2020-01-01\',\'woo\',9')
])
def role_column_privileges(self, grant_columns, insert_columns_pass, data_fail, data_pass,
        table_type, revoke_columns=None, insert_columns_fail=None, node=None):
    """Verify per-column INSERT through a role: inserts succeed on columns
    granted to the role and fail on columns never granted or revoked from it.
    """
    uname = f"user_{getuid()}"
    rname = f"role_{getuid()}"
    tname = f"table_{getuid()}"
    node = self.context.node if node is None else node
    with table(node, tname, table_type), user(node, uname), role(node, rname):
        with When("I grant insert privilege"):
            node.query(f"GRANT INSERT({grant_columns}) ON {tname} TO {rname}")
        with And("I grant the role to a user"):
            node.query(f"GRANT {rname} TO {uname}")
        if insert_columns_fail is not None:
            with And("I insert into columns without insert privilege"):
                exitcode, message = errors.not_enough_privileges(name=uname)
                node.query(
                    f"INSERT INTO {tname} ({insert_columns_fail}) VALUES ({data_fail})",
                    settings=[("user", uname)],
                    exitcode=exitcode,
                    message=message,
                )
        with And("I insert into granted column"):
            node.query(
                f"INSERT INTO {tname} ({insert_columns_pass}) VALUES ({data_pass})",
                settings=[("user", uname)],
            )
        with Then("I check the insert functioned"):
            assert input_output_equality_check(node, insert_columns_pass, data_pass, tname), error()
        if revoke_columns is not None:
            with When("I revoke insert privilege from columns"):
                node.query(f"REVOKE INSERT({revoke_columns}) ON {tname} FROM {rname}")
            with And("I insert into revoked columns"):
                exitcode, message = errors.not_enough_privileges(name=uname)
                node.query(
                    f"INSERT INTO {tname} ({insert_columns_pass}) VALUES ({data_pass})",
                    settings=[("user", uname)],
                    exitcode=exitcode,
                    message=message,
                )
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Insert_Cluster("1.0"),
)
def user_with_privilege_on_cluster(self, table_type, node=None):
    """Verify that cluster-wide grants and revokes of INSERT control whether
    the user can insert into the table on this node.
    """
    uname = f"user_{getuid()}"
    tname = f"table_{getuid()}"
    node = self.context.node if node is None else node
    with table(node, tname, table_type):
        try:
            with Given("I have a user on a cluster"):
                node.query(f"CREATE USER OR REPLACE {uname} ON CLUSTER sharded_cluster")
            with When("I grant insert privilege on a cluster without the node with the table"):
                node.query(f"GRANT ON CLUSTER sharded_cluster23 INSERT ON {tname} TO {uname}")
            with And("I insert into the table expecting a fail"):
                exitcode, message = errors.not_enough_privileges(name=uname)
                node.query(
                    f"INSERT INTO {tname} (d) VALUES ('2020-01-01')",
                    settings=[("user", uname)],
                    exitcode=exitcode,
                    message=message,
                )
            with And("I grant insert privilege on cluster including all nodes"):
                node.query(f"GRANT ON CLUSTER sharded_cluster INSERT ON {tname} TO {uname}")
            with And("I revoke insert privilege on cluster without the node with the table"):
                node.query(f"REVOKE ON CLUSTER sharded_cluster23 INSERT ON {tname} FROM {uname}")
            with And("I insert into the table"):
                node.query(
                    f"INSERT INTO {tname} (d) VALUES ('2020-01-01')",
                    settings=[("user", uname)],
                )
            with And("I check that I can read inserted data"):
                output = node.query(f"SELECT d FROM {tname} FORMAT JSONEachRow").output
                assert output == '{"d":"2020-01-01"}', error()
            with And("I revoke insert privilege on cluster with all nodes"):
                node.query(f"REVOKE ON CLUSTER sharded_cluster INSERT ON {tname} FROM {uname}")
            with Then("I insert into table expecting fail"):
                exitcode, message = errors.not_enough_privileges(name=uname)
                node.query(
                    f"INSERT INTO {tname} (d) VALUES ('2020-01-01')",
                    settings=[("user", uname)],
                    exitcode=exitcode,
                    message=message,
                )
        finally:
            with Finally("I drop the user"):
                node.query(f"DROP USER {uname} ON CLUSTER sharded_cluster")
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Insert_Cluster("1.0"),
)
def role_with_privilege_on_cluster(self, table_type, node=None):
    """Verify that cluster-wide grants and revokes of INSERT on a role
    control whether a user holding that role can insert into the table.
    """
    uname = f"user_{getuid()}"
    rname = f"role_{getuid()}"
    tname = f"table_{getuid()}"
    node = self.context.node if node is None else node
    with table(node, tname, table_type):
        try:
            with Given("I have a user on a cluster"):
                node.query(f"CREATE USER OR REPLACE {uname} ON CLUSTER sharded_cluster")
            with And("I have a role on a cluster"):
                node.query(f"CREATE ROLE OR REPLACE {rname} ON CLUSTER sharded_cluster")
            with When("I grant the role to the user"):
                node.query(f"GRANT {rname} TO {uname}")
            with And("I grant insert privilege on a cluster without the node with the table"):
                node.query(f"GRANT ON CLUSTER sharded_cluster23 INSERT ON {tname} TO {rname}")
            with And("I insert into the table expecting a fail"):
                exitcode, message = errors.not_enough_privileges(name=uname)
                node.query(
                    f"INSERT INTO {tname} (d) VALUES ('2020-01-01')",
                    settings=[("user", uname)],
                    exitcode=exitcode,
                    message=message,
                )
            with And("I grant insert privilege on cluster including all nodes"):
                node.query(f"GRANT ON CLUSTER sharded_cluster INSERT ON {tname} TO {rname}")
            with And("I revoke insert privilege on cluster without the table node"):
                node.query(f"REVOKE ON CLUSTER sharded_cluster23 INSERT ON {tname} FROM {rname}")
            with And("I insert into the table"):
                node.query(
                    f"INSERT INTO {tname} (d) VALUES ('2020-01-01')",
                    settings=[("user", uname)],
                )
            with And("I check that I can read inserted data"):
                output = node.query(f"SELECT d FROM {tname} FORMAT JSONEachRow").output
                assert output == '{"d":"2020-01-01"}', error()
            with And("I revoke insert privilege on cluster with all nodes"):
                node.query(f"REVOKE ON CLUSTER sharded_cluster INSERT ON {tname} FROM {rname}")
            with Then("I insert into table expecting fail"):
                exitcode, message = errors.not_enough_privileges(name=uname)
                node.query(
                    f"INSERT INTO {tname} (d) VALUES ('2020-01-01')",
                    settings=[("user", uname)],
                    exitcode=exitcode,
                    message=message,
                )
        finally:
            with Finally("I drop the user"):
                node.query(f"DROP USER {uname} ON CLUSTER sharded_cluster")
@TestOutline(Feature)
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Insert("1.0"),
    RQ_SRS_006_RBAC_Privileges_Insert_TableEngines("1.0")
)
@Examples("table_type", [
    (key,) for key in table_types.keys()
])
@Flags(TE)
@Name("insert")
def feature(self, table_type, parallel=None, stress=None, node="clickhouse1"):
    """Run every INSERT privilege scenario in this module against the given
    table engine.

    Args:
        table_type: table engine key from table_types to run the scenarios with.
        parallel: when not None, overrides self.context.parallel.
        stress: when not None, overrides self.context.stress.
        node: name of the cluster node scenarios run against.
    """
    self.context.node = self.context.cluster.node(node)
    self.context.node1 = self.context.cluster.node("clickhouse1")
    self.context.node2 = self.context.cluster.node("clickhouse2")
    self.context.node3 = self.context.cluster.node("clickhouse3")
    if stress is not None:
        self.context.stress = stress
    if parallel is not None:
        # BUG FIX: previously assigned to self.context.stress, silently
        # clobbering the stress setting; `parallel` belongs in context.parallel.
        self.context.parallel = parallel
    tasks = []
    pool = Pool(10)
    try:
        # Launch every Scenario in this module through the shared runner.
        for scenario in loads(current_module(), Scenario):
            run_scenario(pool, tasks, Scenario(test=scenario, setup=instrument_clickhouse_server_log), {"table_type" : table_type})
    finally:
        # Always wait for outstanding scenario tasks before returning.
        join(tasks)
| 52.42663
| 131
| 0.636811
| 2,646
| 19,293
| 4.465986
| 0.060847
| 0.045358
| 0.044851
| 0.023356
| 0.885758
| 0.876449
| 0.858763
| 0.849793
| 0.848354
| 0.821782
| 0
| 0.018176
| 0.244285
| 19,293
| 367
| 132
| 52.569482
| 0.792318
| 0.070751
| 0
| 0.713836
| 0
| 0
| 0.323055
| 0.01303
| 0
| 0
| 0
| 0
| 0.022013
| 1
| 0.044025
| false
| 0.037736
| 0.022013
| 0
| 0.069182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0af38d81a60520a6832be86dfe345c1c059a496f
| 5,396
|
py
|
Python
|
robogym/envs/rearrange/tests/test_object_off_table.py
|
0xflotus/robogym
|
5ec2fcbda9828941fe3072792dd25fb5a915bbbb
|
[
"MIT"
] | 288
|
2020-11-12T21:39:34.000Z
|
2022-03-19T23:27:50.000Z
|
robogym/envs/rearrange/tests/test_object_off_table.py
|
0xflotus/robogym
|
5ec2fcbda9828941fe3072792dd25fb5a915bbbb
|
[
"MIT"
] | 3
|
2020-12-12T19:19:30.000Z
|
2022-03-24T05:21:39.000Z
|
robogym/envs/rearrange/tests/test_object_off_table.py
|
0xflotus/robogym
|
5ec2fcbda9828941fe3072792dd25fb5a915bbbb
|
[
"MIT"
] | 31
|
2020-11-12T22:31:01.000Z
|
2022-02-28T20:34:48.000Z
|
import pytest
from robogym.envs.rearrange.composer import make_env
class ObjectOffTableTest:
    """Tests for ``check_objects_off_table`` of the rearrange mujoco simulation.

    The coordinates below suggest a table surface spanning roughly
    x in [1.05, 1.55], y in [0.4, 1.1] with the top near z = 0.3
    (z = 0.299 is just below the top, z = 0.4 above it) — inferred from
    the test data, confirm against the composer env's table geometry.

    NOTE(review): pytest's default collection only picks up classes named
    ``Test*`` and skips classes that define ``__init__``, so as written
    these tests are not collected by a plain ``pytest`` run — verify how
    this suite is actually invoked before relying on it.
    """

    def __init__(self):
        # Build one composer env and keep a handle to its simulation;
        # all test methods query self.sim only.
        env = make_env()
        env.reset()
        self.sim = env.unwrapped.mujoco_simulation

    @pytest.mark.parametrize(
        "object_positions,expected_off_table",
        [
            ([[1.3, 0.75, 0.4]], [False]),
            ([[1.05, 0.4, 0.4]], [False]),
            ([[1.05, 1.1, 0.4]], [False]),
            ([[1.55, 0.4, 0.4]], [False]),
            ([[1.55, 1.1, 0.4]], [False]),
        ],
    )
    def test_single_obj_on_table(self, object_positions, expected_off_table):
        """An object above the table surface is reported as on the table."""
        assert expected_off_table == self.sim.check_objects_off_table(object_positions)

    @pytest.mark.parametrize(
        "object_positions,expected_off_table",
        [
            ([[1.3, 0.75, 0.299]], [True]),
            ([[1.05, 0.4, 0.299]], [True]),
            ([[1.05, 1.1, 0.299]], [True]),
            ([[1.55, 0.4, 0.299]], [True]),
            ([[1.55, 1.1, 0.299]], [True]),
        ],
    )
    def test_single_obj_under_table(self, object_positions, expected_off_table):
        """An object below the table top (z just under the surface) is off the table."""
        assert expected_off_table == self.sim.check_objects_off_table(object_positions)

    @pytest.mark.parametrize(
        "object_positions,expected_off_table",
        [
            ([[-0.1 + 1.05, 0.4, 0.4]], [True]),
            ([[1.05, -0.1 + 0.4, 0.4]], [True]),
            ([[-0.1 + 1.05, 1.1, 0.4]], [True]),
            ([[1.05, +0.1 + 1.1, 0.4]], [True]),
            ([[+0.1 + 1.55, 0.4, 0.4]], [True]),
            ([[1.55, -0.1 + 0.4, 0.4]], [True]),
            ([[+0.1 + 1.55, 1.1, 0.4]], [True]),
            ([[1.55, +0.1 + 1.1, 0.4]], [True]),
        ],
    )
    def test_single_obj_outside_table_hor(self, object_positions, expected_off_table):
        """An object horizontally outside the table footprint is off the table."""
        assert expected_off_table == self.sim.check_objects_off_table(object_positions)

    @pytest.mark.parametrize(
        "object_positions,expected_off_table",
        [
            ([[-0.1 + 1.05, 0.4, 0.299]], [True]),
            ([[1.05, -0.1 + 0.4, 0.299]], [True]),
            ([[-0.1 + 1.05, 1.1, 0.299]], [True]),
            ([[1.05, +0.1 + 1.1, 0.299]], [True]),
            ([[+0.1 + 1.55, 0.4, 0.299]], [True]),
            ([[1.55, -0.1 + 0.4, 0.299]], [True]),
            ([[+0.1 + 1.55, 1.1, 0.299]], [True]),
            ([[1.55, +0.1 + 1.1, 0.299]], [True]),
        ],
    )
    def test_single_obj_outside_and_under(self, object_positions, expected_off_table):
        """An object both outside the footprint and below the top is off the table."""
        assert expected_off_table == self.sim.check_objects_off_table(object_positions)

    # Multiple objects: each case from the single-object suites plus one
    # known-on-table object appended, to exercise the vectorized return.

    @pytest.mark.parametrize(
        "object_positions,expected_off_table",
        [
            ([[1.3, 0.75, 0.4]], [False]),
            ([[1.05, 0.4, 0.4]], [False]),
            ([[1.05, 1.1, 0.4]], [False]),
            ([[1.55, 0.4, 0.4]], [False]),
            ([[1.55, 1.1, 0.4]], [False]),
        ],
    )
    def test_mul_obj_on_table(self, object_positions, expected_off_table):
        """Two on-table objects are both reported as on the table."""
        # BUG FIX: build new lists instead of mutating the parametrize
        # arguments in place — pytest reuses the same list objects, so an
        # in-place append leaks the extra entry into repeated runs.
        positions = object_positions + [[1.3, 0.75, 0.4]]
        expected = expected_off_table + [False]
        assert (expected == self.sim.check_objects_off_table(positions)).all()

    @pytest.mark.parametrize(
        "object_positions,expected_off_table",
        [
            ([[1.3, 0.75, 0.299]], [True]),
            ([[1.05, 0.4, 0.299]], [True]),
            ([[1.05, 1.1, 0.299]], [True]),
            ([[1.55, 0.4, 0.299]], [True]),
            ([[1.55, 1.1, 0.299]], [True]),
        ],
    )
    def test_mul_obj_under_table(self, object_positions, expected_off_table):
        """One under-table and one on-table object get per-object results."""
        # Copy rather than mutate the shared parametrize lists (see above).
        positions = object_positions + [[1.3, 0.75, 0.4]]
        expected = expected_off_table + [False]
        assert (expected == self.sim.check_objects_off_table(positions)).all()

    @pytest.mark.parametrize(
        "object_positions,expected_off_table",
        [
            ([[-0.1 + 1.05, 0.4, 0.4]], [True]),
            ([[1.05, -0.1 + 0.4, 0.4]], [True]),
            ([[-0.1 + 1.05, 1.1, 0.4]], [True]),
            ([[1.05, +0.1 + 1.1, 0.4]], [True]),
            ([[+0.1 + 1.55, 0.4, 0.4]], [True]),
            ([[1.55, -0.1 + 0.4, 0.4]], [True]),
            ([[+0.1 + 1.55, 1.1, 0.4]], [True]),
            ([[1.55, +0.1 + 1.1, 0.4]], [True]),
        ],
    )
    def test_mul_obj_outside_table_hor(self, object_positions, expected_off_table):
        """One horizontally-off and one on-table object get per-object results."""
        # Copy rather than mutate the shared parametrize lists (see above).
        positions = object_positions + [[1.3, 0.75, 0.4]]
        expected = expected_off_table + [False]
        assert (expected == self.sim.check_objects_off_table(positions)).all()

    @pytest.mark.parametrize(
        "object_positions,expected_off_table",
        [
            ([[-0.1 + 1.05, 0.4, 0.299]], [True]),
            ([[1.05, -0.1 + 0.4, 0.299]], [True]),
            ([[-0.1 + 1.05, 1.1, 0.299]], [True]),
            ([[1.05, +0.1 + 1.1, 0.299]], [True]),
            ([[+0.1 + 1.55, 0.4, 0.299]], [True]),
            ([[1.55, -0.1 + 0.4, 0.299]], [True]),
            ([[+0.1 + 1.55, 1.1, 0.299]], [True]),
            ([[1.55, +0.1 + 1.1, 0.299]], [True]),
        ],
    )
    def test_mul_obj_outside_and_under(self, object_positions, expected_off_table):
        """One outside-and-under and one on-table object get per-object results."""
        # Copy rather than mutate the shared parametrize lists (see above).
        positions = object_positions + [[1.3, 0.75, 0.4]]
        expected = expected_off_table + [False]
        assert (expected == self.sim.check_objects_off_table(positions)).all()
| 36.214765
| 87
| 0.492587
| 776
| 5,396
| 3.23067
| 0.065722
| 0.043079
| 0.1787
| 0.165935
| 0.932988
| 0.932988
| 0.929398
| 0.929398
| 0.929398
| 0.911847
| 0
| 0.133403
| 0.294292
| 5,396
| 148
| 88
| 36.459459
| 0.524947
| 0.002965
| 0
| 0.763359
| 0
| 0
| 0.052064
| 0.052064
| 0
| 0
| 0
| 0
| 0.061069
| 1
| 0.068702
| false
| 0
| 0.015267
| 0
| 0.091603
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.