hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1b048db16e120a67568f6689e1288a8cac7f648d
| 62
|
py
|
Python
|
fluent_python/testimport/aaaa/b.py
|
MonsterRob/python_book
|
b419aac01bf2070c31098d3d81b40b57ae292f11
|
[
"MIT"
] | null | null | null |
fluent_python/testimport/aaaa/b.py
|
MonsterRob/python_book
|
b419aac01bf2070c31098d3d81b40b57ae292f11
|
[
"MIT"
] | null | null | null |
fluent_python/testimport/aaaa/b.py
|
MonsterRob/python_book
|
b419aac01bf2070c31098d3d81b40b57ae292f11
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class B:
print('B is imported')
| 10.333333
| 26
| 0.516129
| 9
| 62
| 3.555556
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021739
| 0.258065
| 62
| 5
| 27
| 12.4
| 0.673913
| 0.33871
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
1b389db0baad26c3152a85595c3d28e9f14e9a11
| 78
|
py
|
Python
|
scripts/fact100.py
|
Marlon-Lazo-Coronado/tiny-bignum-c
|
b5ef2beb2010f5d5dd1a57fc3515e3d6e5fc97ad
|
[
"Unlicense"
] | 331
|
2017-10-28T08:33:54.000Z
|
2022-03-17T08:22:49.000Z
|
scripts/fact100.py
|
Marlon-Lazo-Coronado/tiny-bignum-c
|
b5ef2beb2010f5d5dd1a57fc3515e3d6e5fc97ad
|
[
"Unlicense"
] | 25
|
2017-11-11T22:26:22.000Z
|
2021-12-22T09:47:28.000Z
|
scripts/fact100.py
|
Marlon-Lazo-Coronado/tiny-bignum-c
|
b5ef2beb2010f5d5dd1a57fc3515e3d6e5fc97ad
|
[
"Unlicense"
] | 74
|
2017-12-18T18:59:39.000Z
|
2022-01-27T11:22:56.000Z
|
import math
print("factorial(100) using Python = %.0x" % math.factorial(100))
| 26
| 65
| 0.717949
| 11
| 78
| 5.090909
| 0.727273
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101449
| 0.115385
| 78
| 2
| 66
| 39
| 0.710145
| 0
| 0
| 0
| 0
| 0
| 0.435897
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
1b8303699a656abff936fc954196e14e0a301514
| 91
|
py
|
Python
|
tests/guinea-pigs/nose/teardown_function_error/testa.py
|
djeebus/teamcity-python
|
b4d38efc1f2c8269128715bf084de9a2d463a922
|
[
"Apache-2.0"
] | 105
|
2015-06-24T15:40:41.000Z
|
2022-02-04T10:30:34.000Z
|
tests/guinea-pigs/nose/teardown_function_error/testa.py
|
djeebus/teamcity-python
|
b4d38efc1f2c8269128715bf084de9a2d463a922
|
[
"Apache-2.0"
] | 145
|
2015-06-24T15:26:28.000Z
|
2022-03-22T20:04:19.000Z
|
tests/guinea-pigs/nose/teardown_function_error/testa.py
|
djeebus/teamcity-python
|
b4d38efc1f2c8269128715bf084de9a2d463a922
|
[
"Apache-2.0"
] | 76
|
2015-07-20T08:18:21.000Z
|
2022-03-18T20:03:53.000Z
|
def teardown_func():
assert 1 == 0
def test():
pass
test.teardown = teardown_func
| 13
| 29
| 0.659341
| 13
| 91
| 4.461538
| 0.615385
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028571
| 0.230769
| 91
| 6
| 30
| 15.166667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.4
| true
| 0.2
| 0
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
1bb5e304c3a59b939d661223ff68d4692fbfad8e
| 16,648
|
py
|
Python
|
venv/Lib/site-packages/fdutil/unittests/test_dict_tools.py
|
avim2809/CameraSiteBlocker
|
bfc0434e75e8f3f95c459a4adc86b7673200816e
|
[
"Apache-2.0"
] | null | null | null |
venv/Lib/site-packages/fdutil/unittests/test_dict_tools.py
|
avim2809/CameraSiteBlocker
|
bfc0434e75e8f3f95c459a4adc86b7673200816e
|
[
"Apache-2.0"
] | null | null | null |
venv/Lib/site-packages/fdutil/unittests/test_dict_tools.py
|
avim2809/CameraSiteBlocker
|
bfc0434e75e8f3f95c459a4adc86b7673200816e
|
[
"Apache-2.0"
] | null | null | null |
# encoding: utf-8
import unittest
from fdutil import dict_tools
class TestFilterDict(unittest.TestCase):
def setUp(self):
self.input_dict = {u"e1": {u"API": u"A", u"ENV": u"1", u"PARAMS": u"x, y, z"},
u"e2": {u"API": u"B", u"ENV": u"1", u"PARAMS": u"w, x"},
u"e3": {u"API": u"B", u"ENV": u"2", u"PARAMS": u"w, x"},
u"description1": u"Example environments parameters:",
u"description2": u"ENV - The environment to be used",
u"description3": u"ENV",
u"description4": [u"ENV", u"API", u"PARAM"],
u"description5": [u"API", u"PARAM"]
}
self.single_filter = [(u"ENV", u"1")]
self.single_filter_none = [(u"ENV", None)]
self.and_filter = [(u'API', u'B'), (u'ENV', u'2', u'AND')]
self.or_filter = [(u'API', u'B'), (u'ENV', u'2', u'OR')]
self.multi_filter = [(u'API', u'B'), (u'ENV', u'2', u'AND'), (u'PARAMS', u'x, y, z', u'OR')]
self.multi_and_filter = [(u'API', None), (u'ENV', u'1', u'AND'), (u'PARAMS', u'w, x', u'AND')]
def tearDown(self):
pass
# happy path tests
def test_no_filter_exclude_false(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=[],
exclude=False)
# Always nothing
expected_output = {}
self.assertEqual(expected_output,
output,
msg=u'No Filter, exclude=False; Not working as expected')
def test_no_filter_exclude_true(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=[],
exclude=True)
# Always Everything!
expected_output = {u"e1": {u"API": u"A", u"ENV": u"1", u"PARAMS": u"x, y, z"},
u"e2": {u"API": u"B", u"ENV": u"1", u"PARAMS": u"w, x"},
u"e3": {u"API": u"B", u"ENV": u"2", u"PARAMS": u"w, x"},
u"description1": u"Example environments parameters:",
u"description2": u"ENV - The environment to be used",
u"description3": u"ENV",
u"description4": [u"ENV", u"API", u"PARAM"],
u"description5": [u"API", u"PARAM"]
}
self.assertEqual(expected_output,
output,
msg=u'No Filter, exclude=True; Not working as expected')
def test_single_filter_exclude_false(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=self.single_filter,
exclude=False)
expected_output = {u'e1': {u'API': u'A', u'PARAMS': u'x, y, z', u'ENV': u'1'},
u'e2': {u'API': u'B', u'PARAMS': u'w, x', u'ENV': u'1'},
}
self.assertEqual(expected_output,
output,
msg=u'Single Filter, exclude=False; Not working as expected')
def test_single_filter_exclude_true(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=self.single_filter,
exclude=True)
expected_output = {u"e3": {u"API": u"B", u"ENV": u"2", u"PARAMS": u"w, x"},
u"description1": u"Example environments parameters:",
u"description2": u"ENV - The environment to be used",
u"description3": u"ENV",
u"description4": [u"ENV", u"API", u"PARAM"],
u"description5": [u"API", u"PARAM"]
}
self.assertEqual(expected_output,
output,
msg=u'Single Filter, exclude=True; Not working as expected')
def test_and_filter_exclude_false(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=self.and_filter,
exclude=False)
expected_output = {u"e3": {u"API": u"B", u"ENV": u"2", u"PARAMS": u"w, x"}}
self.assertEqual(expected_output,
output,
msg=u'Multiple Filters (AND), exclude=False; Not working as expected')
def test_and_filter_exclude_true(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=self.and_filter,
exclude=True)
expected_output = {u"e1": {u"API": u"A", u"ENV": u"1", u"PARAMS": u"x, y, z"},
u"e2": {u"API": u"B", u"ENV": u"1", u"PARAMS": u"w, x"},
u"description1": u"Example environments parameters:",
u"description2": u"ENV - The environment to be used",
u"description3": u"ENV",
u"description4": [u"ENV", u"API", u"PARAM"],
u"description5": [u"API", u"PARAM"]
}
self.assertEqual(expected_output,
output,
msg=u'Multiple Filters (AND), exclude=True; Not working as expected')
def test_or_filter_exclude_false(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=self.or_filter,
exclude=False)
expected_output = {u"e2": {u"API": u"B", u"ENV": u"1", u"PARAMS": u"w, x"},
u"e3": {u"API": u"B", u"ENV": u"2", u"PARAMS": u"w, x"}
}
self.assertEqual(expected_output,
output,
msg=u'Multiple Filters (OR), exclude=False; Not working as expected')
def test_or_filter_exclude_true(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=self.or_filter,
exclude=True)
expected_output = {u"e1": {u"API": u"A", u"ENV": u"1", u"PARAMS": u"x, y, z"},
u"description1": u"Example environments parameters:",
u"description2": u"ENV - The environment to be used",
u"description3": u"ENV",
u"description4": [u"ENV", u"API", u"PARAM"],
u"description5": [u"API", u"PARAM"]
}
self.assertEqual(expected_output,
output,
msg=u'Multiple Filters (OR), exclude=True; Not working as expected')
def test_multi_filter_exclude_false(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=self.multi_filter,
exclude=False)
expected_output = {u"e1": {u"API": u"A", u"ENV": u"1", u"PARAMS": u"x, y, z"},
u"e3": {u"API": u"B", u"ENV": u"2", u"PARAMS": u"w, x"}
}
self.assertEqual(expected_output,
output,
msg=u'Multiple Filters (AND + OR), exclude=False; Not working as expected')
def test_multi_filter_exclude_true(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=self.multi_filter,
exclude=True)
expected_output = {u"e2": {u"API": u"B", u"ENV": u"1", u"PARAMS": u"w, x"},
u"description1": u"Example environments parameters:",
u"description2": u"ENV - The environment to be used",
u"description3": u"ENV",
u"description4": [u"ENV", u"API", u"PARAM"],
u"description5": [u"API", u"PARAM"]
}
self.assertEqual(expected_output,
output,
msg=u'Multiple Filters (AND + OR), exclude=True; Not working as expected')
def test_multi_and_filter_exclude_false(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=self.multi_and_filter,
exclude=False)
expected_output = {u"e2": {u"API": u"B", u"ENV": u"1", u"PARAMS": u"w, x"}}
self.assertEqual(expected_output,
output,
msg=u'Multiple Filters (AND + AND), exclude=False; Not working as expected')
def test_multi_and_filter_exclude_true(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=self.multi_and_filter,
exclude=True)
expected_output = {u"e1": {u"API": u"A", u"ENV": u"1", u"PARAMS": u"x, y, z"},
u"e3": {u"API": u"B", u"ENV": u"2", u"PARAMS": u"w, x"},
u"description1": u"Example environments parameters:",
u"description2": u"ENV - The environment to be used",
u"description3": u"ENV",
u"description4": [u"ENV", u"API", u"PARAM"],
u"description5": [u"API", u"PARAM"]
}
self.assertEqual(expected_output,
output,
msg=u'Multiple Filters (AND + AND), exclude=True; Not working as expected')
def test_none_search_value_exclude_false(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=self.single_filter_none,
exclude=False)
expected_output = {u"e1": {u"API": u"A", u"ENV": u"1", u"PARAMS": u"x, y, z"},
u"e2": {u"API": u"B", u"ENV": u"1", u"PARAMS": u"w, x"},
u"e3": {u"API": u"B", u"ENV": u"2", u"PARAMS": u"w, x"},
u"description3": u"ENV",
u"description4": [u"ENV", u"API", u"PARAM"],
}
self.assertEqual(expected_output,
output,
msg=u'None Search Value Filter, exclude=False; Not working as expected')
def test_none_search_value_exclude_true(self):
output = dict_tools.filter_dict(src_dict=self.input_dict,
filters=self.single_filter_none,
exclude=True)
expected_output = {u"description1": u"Example environments parameters:",
u"description2": u"ENV - The environment to be used",
u"description5": [u"API", u"PARAM"]
}
self.assertEqual(expected_output,
output,
msg=u'None Search Value Filter, exclude=True; Not working as expected')
# Unhappy path tests
def test_src_dict_wrong_type(self):
with self.assertRaises(AssertionError,
msg=u'src_dict wrong type assertion; Not working as expected'):
_ = dict_tools.filter_dict(src_dict=u'',
filters=[])
def test_filters_wrong_type(self):
with self.assertRaises(AssertionError,
msg=u'filters wrong type assertion; Not working as expected'):
_ = dict_tools.filter_dict(src_dict=self.input_dict,
filters=u'')
def test_first_filter_wrong_type(self):
with self.assertRaises(AssertionError,
msg=u'first filter wrong type assertion; Not working as expected'):
_ = dict_tools.filter_dict(src_dict=self.input_dict,
filters=[{u''}])
def test_other_filter_wrong_type(self):
with self.assertRaises(AssertionError,
msg=u'first filter wrong type assertion; Not working as expected'):
_ = dict_tools.filter_dict(src_dict=self.input_dict,
filters=[(u'ENV', u'1'), {u''}])
def test_first_filter_missing_param(self):
with self.assertRaises(AssertionError,
msg=u'First filter missing parameter; Not working as expected'):
_ = dict_tools.filter_dict(src_dict=self.input_dict,
filters=[(u'ENV', )])
def test_other_filter_missing_param(self):
with self.assertRaises(AssertionError,
msg=u'First filter missing parameter; Not working as expected'):
_ = dict_tools.filter_dict(src_dict=self.input_dict,
filters=[(u'ENV', u'1'), (u'API', u'A')])
def test_src_value(self):
src_dict = self.input_dict.copy()
src_dict[u'broken'] = tuple()
with self.assertRaises(TypeError,
msg=u'invlaid filter assertion; Not working as expected'):
_ = dict_tools.filter_dict(src_dict=src_dict,
filters=self.multi_filter)
class TestSortDict(unittest.TestCase):
def setUp(self):
self.input_dict = {
u"description5": [u"API", u"PARAM"],
u"description1": u"Example environments parameters:",
u"description3": u"ENV",
u"description4": [u"ENV", u"API", u"PARAM"],
u"description2": u"ENV - The environment to be used"
}
self.asc_dict = {
u"description1": u"Example environments parameters:",
u"description2": u"ENV - The environment to be used",
u"description3": u"ENV",
u"description4": [u"ENV", u"API", u"PARAM"],
u"description5": [u"API", u"PARAM"]
}
self.desc_dict = {
u"description5": [u"API", u"PARAM"],
u"description4": [u"ENV", u"API", u"PARAM"],
u"description3": u"ENV",
u"description2": u"ENV - The environment to be used",
u"description1": u"Example environments parameters:"
}
def tearDown(self):
pass
# happy path tests
def test_sort_ascending(self):
output = dict_tools.sort_dict(src_dict=self.input_dict)
self.assertEqual(self.asc_dict,
output,
msg=u'Sort ascending; Not working as expected')
def test_sort_descending(self):
output = dict_tools.sort_dict(src_dict=self.input_dict,
descending=True)
self.assertEqual(self.desc_dict,
output,
msg=u'Sort descending; Not working as expected')
class TestRecursiveUpdate(unittest.TestCase):
def setUp(self):
self.current_data = {
u'dummy': u'some_data',
u'dummy_list': [
u'item1',
u'item2',
u'item3'
],
u'dummy_dict': {
u'a': 1,
u'b': 2,
u'c': 3
}
}
self.updated_data = {
u'dummy': u'updated_data',
u'dummy_list': [
u'item4'
],
u'dummy_dict': {
u'b': 999,
u'd': 4
}
}
# happy path tests
def test_inherit_method(self):
expected_output = {
u'dummy': u'updated_data',
u'dummy_list': [
u'item4'
],
u'dummy_dict': {
u'a': 1,
u'b': 999,
u'c': 3,
u'd': 4
}
}
self.assertDictEqual(expected_output,
dict_tools.recursive_update(self.current_data, self.updated_data),
u'recursive_update method failed')
| 41.004926
| 102
| 0.472729
| 1,863
| 16,648
| 4.078905
| 0.064412
| 0.034741
| 0.034873
| 0.060534
| 0.895644
| 0.86893
| 0.851823
| 0.838137
| 0.829056
| 0.747598
| 0
| 0.013036
| 0.405574
| 16,648
| 405
| 103
| 41.106173
| 0.75485
| 0.007148
| 0
| 0.607973
| 0
| 0
| 0.222868
| 0
| 0
| 0
| 0
| 0
| 0.096346
| 1
| 0.096346
| false
| 0.006645
| 0.006645
| 0
| 0.112957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
941965c55aee36c19360b76286226606df4dfc20
| 70
|
py
|
Python
|
simulation/TaylorCouette.py
|
ajupatatero/neurasim
|
c1d3f8163a7389b06a13e453daa98ad5157d9b2e
|
[
"MIT"
] | null | null | null |
simulation/TaylorCouette.py
|
ajupatatero/neurasim
|
c1d3f8163a7389b06a13e453daa98ad5157d9b2e
|
[
"MIT"
] | null | null | null |
simulation/TaylorCouette.py
|
ajupatatero/neurasim
|
c1d3f8163a7389b06a13e453daa98ad5157d9b2e
|
[
"MIT"
] | null | null | null |
from .Simulation import *
class TaylorCouette(Simulation):
pass
| 11.666667
| 32
| 0.742857
| 7
| 70
| 7.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185714
| 70
| 6
| 33
| 11.666667
| 0.912281
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
941fd7251a03f16df60cfac4e102fa3ce732b0c4
| 26
|
py
|
Python
|
stompy/model/otps/__init__.py
|
oneconcern/stompy
|
d2cb86e7d1a2de698701b8d1b391e27e1ee935c0
|
[
"MIT"
] | 17
|
2017-10-12T14:53:25.000Z
|
2022-02-26T01:24:52.000Z
|
stompy/model/otps/__init__.py
|
oneconcern/stompy
|
d2cb86e7d1a2de698701b8d1b391e27e1ee935c0
|
[
"MIT"
] | 6
|
2018-03-12T12:43:14.000Z
|
2021-09-04T17:44:31.000Z
|
stompy/model/otps/__init__.py
|
rustychris/stompy
|
4efb78824804edc68555bced275e37842f98ba1f
|
[
"MIT"
] | 6
|
2017-09-29T21:20:11.000Z
|
2020-09-28T21:29:23.000Z
|
from .otps_model import *
| 13
| 25
| 0.769231
| 4
| 26
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 26
| 1
| 26
| 26
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
946c648cb49b15e97b1294dfa7a6135ac198ea6e
| 4,488
|
py
|
Python
|
python-trunk/sfapi2/sflib/crypto.py
|
raychorn/svn_molten-magma
|
8aa2ff2340707eecae6514943e86f5afba9cd54a
|
[
"CC0-1.0"
] | null | null | null |
python-trunk/sfapi2/sflib/crypto.py
|
raychorn/svn_molten-magma
|
8aa2ff2340707eecae6514943e86f5afba9cd54a
|
[
"CC0-1.0"
] | null | null | null |
python-trunk/sfapi2/sflib/crypto.py
|
raychorn/svn_molten-magma
|
8aa2ff2340707eecae6514943e86f5afba9cd54a
|
[
"CC0-1.0"
] | null | null | null |
def seedPassword():
import random
s = ''.join([chr(random.randint(0,254)) for ch in xrange(1024)])
print 's=[%s]' % asReadableData(s)
return s
s = [153,26,153,218,247,171,33,97,169,94,55,193,122,155,164,197,113,47,94,78,56,27,122,34,72,144,156,130,230,35,63,125,122,253,216,151,225,157,3,253,55,173,162,214,72,181,22,40,107,3,191,151,80,70,218,205,178,164,102,253,177,220,183,247,37,87,162,244,84,148,46,18,202,240,196,165,236,225,28,16,170,136,146,84,167,62,105,222,185,220,27,82,109,191,165,245,18,254,67,203,176,49,136,225,38,134,21,124,223,44,76,72,112,125,229,238,78,16,224,180,31,53,35,69,44,161,93,118,95,37,93,47,48,182,6,35,12,176,183,71,90,5,107,205,92,45,21,229,249,214,224,130,143,4,11,133,233,6,90,44,163,48,126,141,28,246,66,114,138,180,229,35,119,206,152,231,33,39,116,234,108,39,140,163,154,39,61,243,6,87,136,67,195,171,248,78,13,6,215,204,196,43,109,96,143,203,132,176,193,218,205,241,252,94,71,80,187,81,155,219,31,196,72,207,40,39,253,116,239,198,18,18,121,106,3,252,88,235,196,219,223,173,102,194,9,250,18,81,101,56,67,154,40,1,119,172,195,120,169,120,219,12,14,111,161,226,224,156,69,174,78,147,251,208,248,221,77,112,131,
157,44,240,168,48,220,22,238,161,18,92,192,219,188,213,41,168,56,52,61,124,26,164,240,11,75,238,209,10,32,152,192,29,177,221,242,78,184,154,3,27,229,164,199,10,233,32,204,7,90,71,46,168,151,4,172,72,125,27,87,171,235,27,33,47,142,22,110,252,9,0,41,28,124,71,174,237,249,117,22,165,135,15,150,88,9,185,80,198,213,182,234,220,173,233,159,204,212,226,236,20,206,40,105,91,143,188,100,89,47,27,123,77,245,225,23,18,175,81,104,130,118,218,192,99,93,230,130,65,70,166,23,13,171,104,209,29,178,42,219,151,186,249,135,216,31,240,111,4,114,104,29,104,30,104,66,74,80,243,248,137,169,18,128,113,113,171,24,229,152,150,119,144,210,49,31,94,202,193,164,136,53,28,82,239,42,88,206,64,33,21,150,239,24,248,184,75,138,6,212,137,242,85,200,74,193,225,59,206,168,53,236,112,166,26,138,55,205,14,59,216,200,187,209,53,117,9,118,224,83,17,140,189,160,66,46,157,164,181,102,87,96,192,151,208,109,198,179,59,138,147,115,92,110,50,77,43,64,75,172,248,182,161,194,186,18,228,212,200,131,213,192,84,110,119,84,246,205,69,57,
133,117,31,236,213,244,231,42,92,110,50,217,175,228,210,157,152,9,35,199,182,149,85,193,250,244,194,114,7,35,51,69,54,77,196,120,211,92,184,136,201,61,24,192,49,252,205,104,109,201,31,193,141,198,102,233,229,237,225,190,229,126,157,147,7,214,77,116,241,3,217,72,86,8,150,220,212,223,144,19,248,19,68,168,155,90,252,7,56,136,8,226,19,179,131,155,185,214,23,76,30,211,43,62,7,220,143,104,55,117,135,215,234,40,113,1,224,2,61,206,206,103,60,82,174,6,188,148,119,44,180,227,241,189,85,95,55,218,191,251,217,150,223,236,128,171,73,210,218,219,101,198,178,6,185,88,75,27,53,226,122,80,153,152,8,138,217,65,211,85,37,223,16,137,160,37,248,130,174,24,79,91,220,162,112,213,29,137,181,39,122,172,172,185,253,43,190,203,62,195,99,168,186,156,246,100,245,172,191,77,149,30,103,25,51,198,16,214,143,25,5,99,173,238,204,102,199,0,217,130,143,149,189,172,241,174,139,119,221,230,51,178,81,195,249,172,223,71,110,69,6,19,66,205,226,55,218,139,114,244,55,89,122,48,97,250,140,173,238,22,254,73,78,180,236,8,205,135,
95,84,237,182,80,49,70,172,126,34,253,39,18,23,25,196,43,240,114,95,236,171,164,166,20,42,192,81,52,60,61,204,57,22,3,83,150,186,42,15,216,145,138,69,81,31,31,84,99,41,78,86,198,129,22,241,98,226,157,215,79,64,138,74,24,69,25,6,230,91,44,109,207,160,92,41,239,220,103,12,171,46,69,69,25,17,247,87,126,171,2,32,235,170,2,55,15,226,186,213,129,43,91,236,112,167,122,212,222,32,52,209,17,130,157,122,159,61,97,114,254,217,174,87,34,176,180,229,215,226,94,114,145,252,233,115,106,148,30,62,58,247,92,93,87,208,65,120,21,222,175,205,127,159,78,240,231,227,53,42,91,240,50,249,252,85,180,11,92,120,62,83,181,19,230,73,239,132,227,135,238,49,112,239,98,187,228]
_cipher_password = ''.join([chr(ch) for ch in s])
#_cipher_password = seedPassword()
def encryptData(data):
from Crypto.Cipher import Blowfish
cObj = Blowfish.new(_cipher_password, Blowfish.MODE_ECB)
m = len(data)
n = 8-divmod(m,8)[-1]
data += '\0'*n
mm = len(data)
eData = cObj.encrypt(data)
sData = cObj.decrypt(eData)
assert sData == data, 'Oops, something went wrong in "%s".' % _utils.funcName()
return eData
def decryptData(data):
from Crypto.Cipher import Blowfish
cObj = Blowfish.new(_cipher_password, Blowfish.MODE_ECB)
sData = cObj.decrypt(data)
return sData
| 140.25
| 1,004
| 0.70098
| 1,134
| 4,488
| 2.76455
| 0.261023
| 0.017863
| 0.004466
| 0.012759
| 0.049761
| 0.049761
| 0.049761
| 0.049761
| 0.049761
| 0.049761
| 0
| 0.616122
| 0.046346
| 4,488
| 31
| 1,005
| 144.774194
| 0.116355
| 0.007353
| 0
| 0.153846
| 0
| 0
| 0.009722
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0
| null | null | 0.153846
| 0.115385
| null | null | 0.038462
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
846a7b076683ae5c85f8f957aefc556b1d044679
| 289
|
py
|
Python
|
torchmeta/toy/__init__.py
|
kylehkhsu/pytorch-meta
|
69b6577782b52f958a5ac6d79fc193c53509863d
|
[
"MIT"
] | 2
|
2020-10-28T03:42:12.000Z
|
2020-10-28T19:52:35.000Z
|
torchmeta/toy/__init__.py
|
kylehkhsu/pytorch-meta
|
69b6577782b52f958a5ac6d79fc193c53509863d
|
[
"MIT"
] | null | null | null |
torchmeta/toy/__init__.py
|
kylehkhsu/pytorch-meta
|
69b6577782b52f958a5ac6d79fc193c53509863d
|
[
"MIT"
] | null | null | null |
from torchmeta.toy.harmonic import Harmonic
from torchmeta.toy.sinusoid import Sinusoid
from torchmeta.toy.sinusoid_line import SinusoidAndLine
from torchmeta.toy.relu import Relu
from torchmeta.toy import helpers
__all__ = ['Harmonic', 'Sinusoid', 'SinusoidAndLine', 'Relu', 'helpers']
| 32.111111
| 72
| 0.809689
| 36
| 289
| 6.361111
| 0.305556
| 0.283843
| 0.349345
| 0.209607
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100346
| 289
| 8
| 73
| 36.125
| 0.880769
| 0
| 0
| 0
| 0
| 0
| 0.145329
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.833333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ca817f1326a80c22b209670bc885354945b554ac
| 37,892
|
py
|
Python
|
instances/passenger_demand/pas-20210421-2109-int14000000000000001e/6.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int14000000000000001e/6.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int14000000000000001e/6.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
"""
PASSENGERS
"""
numPassengers = 3262
passenger_arriving = (
(3, 6, 4, 4, 3, 0, 2, 7, 3, 3, 0, 0), # 0
(3, 7, 9, 2, 1, 0, 3, 12, 1, 6, 2, 0), # 1
(3, 10, 6, 3, 0, 0, 6, 6, 7, 5, 0, 0), # 2
(5, 6, 12, 6, 0, 0, 8, 9, 3, 5, 2, 0), # 3
(3, 10, 4, 1, 1, 0, 9, 8, 5, 3, 0, 0), # 4
(4, 8, 3, 6, 2, 0, 5, 7, 7, 4, 0, 0), # 5
(3, 8, 9, 5, 4, 0, 4, 10, 10, 5, 3, 0), # 6
(5, 6, 6, 3, 1, 0, 6, 5, 9, 6, 1, 0), # 7
(5, 7, 8, 4, 1, 0, 5, 11, 7, 2, 2, 0), # 8
(5, 9, 10, 2, 1, 0, 4, 8, 5, 3, 2, 0), # 9
(4, 7, 11, 1, 0, 0, 4, 12, 6, 6, 3, 0), # 10
(7, 14, 3, 3, 8, 0, 6, 8, 9, 1, 3, 0), # 11
(5, 8, 1, 6, 1, 0, 5, 2, 7, 8, 0, 0), # 12
(1, 10, 8, 4, 6, 0, 9, 6, 10, 2, 1, 0), # 13
(4, 8, 13, 4, 3, 0, 5, 14, 8, 7, 2, 0), # 14
(1, 12, 12, 4, 2, 0, 9, 7, 4, 7, 2, 0), # 15
(3, 8, 6, 1, 1, 0, 5, 7, 4, 6, 1, 0), # 16
(4, 11, 8, 1, 0, 0, 8, 12, 2, 4, 2, 0), # 17
(3, 9, 5, 3, 3, 0, 7, 11, 6, 3, 1, 0), # 18
(1, 7, 12, 6, 2, 0, 8, 12, 10, 7, 4, 0), # 19
(3, 10, 8, 4, 1, 0, 6, 10, 7, 5, 3, 0), # 20
(5, 13, 9, 3, 3, 0, 8, 11, 4, 3, 4, 0), # 21
(4, 8, 5, 2, 1, 0, 7, 6, 9, 3, 2, 0), # 22
(6, 15, 6, 7, 3, 0, 4, 12, 4, 6, 1, 0), # 23
(3, 12, 9, 5, 2, 0, 11, 12, 11, 4, 1, 0), # 24
(2, 10, 3, 5, 2, 0, 2, 11, 8, 14, 5, 0), # 25
(2, 8, 11, 3, 2, 0, 4, 11, 4, 1, 2, 0), # 26
(2, 11, 9, 4, 4, 0, 7, 11, 11, 1, 3, 0), # 27
(5, 11, 8, 3, 2, 0, 3, 11, 6, 5, 5, 0), # 28
(3, 12, 4, 2, 1, 0, 7, 7, 6, 10, 0, 0), # 29
(4, 12, 6, 0, 2, 0, 3, 12, 3, 8, 2, 0), # 30
(5, 7, 11, 2, 4, 0, 5, 5, 12, 5, 2, 0), # 31
(6, 11, 4, 4, 3, 0, 6, 7, 7, 5, 1, 0), # 32
(9, 12, 7, 4, 3, 0, 8, 12, 8, 7, 2, 0), # 33
(6, 5, 13, 4, 2, 0, 8, 5, 3, 2, 1, 0), # 34
(2, 5, 9, 5, 1, 0, 4, 13, 6, 7, 2, 0), # 35
(3, 9, 6, 9, 3, 0, 9, 8, 9, 2, 2, 0), # 36
(4, 9, 10, 3, 0, 0, 9, 6, 5, 4, 4, 0), # 37
(10, 12, 10, 3, 2, 0, 8, 15, 4, 4, 1, 0), # 38
(3, 11, 2, 6, 6, 0, 7, 10, 9, 3, 4, 0), # 39
(1, 12, 8, 4, 4, 0, 4, 6, 6, 3, 1, 0), # 40
(4, 13, 7, 4, 3, 0, 6, 15, 7, 3, 2, 0), # 41
(7, 8, 9, 1, 2, 0, 6, 9, 10, 3, 2, 0), # 42
(3, 14, 9, 5, 4, 0, 4, 7, 3, 5, 2, 0), # 43
(6, 12, 8, 6, 3, 0, 5, 13, 3, 5, 3, 0), # 44
(2, 11, 8, 1, 2, 0, 4, 8, 4, 5, 1, 0), # 45
(3, 11, 8, 7, 2, 0, 6, 7, 7, 2, 2, 0), # 46
(4, 12, 9, 6, 4, 0, 7, 11, 5, 4, 4, 0), # 47
(4, 10, 6, 2, 3, 0, 5, 10, 4, 5, 3, 0), # 48
(2, 7, 8, 8, 1, 0, 8, 8, 5, 6, 4, 0), # 49
(6, 8, 0, 4, 1, 0, 5, 5, 7, 5, 4, 0), # 50
(3, 10, 9, 1, 2, 0, 5, 6, 4, 3, 5, 0), # 51
(5, 5, 4, 3, 2, 0, 1, 18, 6, 4, 0, 0), # 52
(3, 10, 10, 3, 3, 0, 11, 8, 4, 5, 1, 0), # 53
(5, 3, 9, 4, 0, 0, 4, 10, 8, 4, 6, 0), # 54
(4, 5, 6, 6, 1, 0, 9, 6, 9, 10, 2, 0), # 55
(3, 9, 5, 3, 3, 0, 9, 6, 3, 5, 2, 0), # 56
(4, 11, 10, 3, 2, 0, 6, 13, 6, 4, 6, 0), # 57
(1, 16, 13, 2, 1, 0, 4, 10, 8, 6, 3, 0), # 58
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # 59
)
station_arriving_intensity = (
(3.7095121817383676, 9.515044981060607, 11.19193043059126, 8.87078804347826, 10.000240384615385, 6.659510869565219), # 0
(3.7443308140669203, 9.620858238197952, 11.252381752534994, 8.920190141908213, 10.075193108974359, 6.657240994867151), # 1
(3.7787518681104277, 9.725101964085297, 11.31139817195087, 8.968504830917876, 10.148564102564103, 6.654901690821256), # 2
(3.8127461259877085, 9.827663671875001, 11.368936576156813, 9.01569089673913, 10.22028605769231, 6.652493274456523), # 3
(3.8462843698175795, 9.928430874719417, 11.424953852470724, 9.061707125603865, 10.290291666666668, 6.6500160628019325), # 4
(3.879337381718857, 10.027291085770905, 11.479406888210512, 9.106512303743962, 10.358513621794872, 6.647470372886473), # 5
(3.9118759438103607, 10.12413181818182, 11.53225257069409, 9.150065217391306, 10.424884615384617, 6.644856521739131), # 6
(3.943870838210907, 10.218840585104518, 11.58344778723936, 9.19232465277778, 10.489337339743592, 6.64217482638889), # 7
(3.975292847039314, 10.311304899691358, 11.632949425164242, 9.233249396135266, 10.551804487179488, 6.639425603864735), # 8
(4.006112752414399, 10.401412275094698, 11.680714371786634, 9.272798233695653, 10.61221875, 6.636609171195653), # 9
(4.03630133645498, 10.489050224466892, 11.72669951442445, 9.310929951690824, 10.670512820512823, 6.633725845410628), # 10
(4.065829381279876, 10.5741062609603, 11.7708617403956, 9.347603336352659, 10.726619391025642, 6.630775943538648), # 11
(4.094667669007903, 10.656467897727273, 11.813157937017996, 9.382777173913043, 10.780471153846154, 6.627759782608695), # 12
(4.122786981757876, 10.736022647920176, 11.85354499160954, 9.416410250603866, 10.832000801282053, 6.624677679649759), # 13
(4.15015810164862, 10.81265802469136, 11.891979791488144, 9.448461352657004, 10.881141025641025, 6.621529951690821), # 14
(4.1767518107989465, 10.886261541193182, 11.928419223971721, 9.478889266304348, 10.92782451923077, 6.618316915760871), # 15
(4.202538891327675, 10.956720710578002, 11.96282017637818, 9.507652777777778, 10.971983974358976, 6.61503888888889), # 16
(4.227490125353625, 11.023923045998176, 11.995139536025421, 9.53471067330918, 11.013552083333336, 6.611696188103866), # 17
(4.25157629499561, 11.087756060606061, 12.025334190231364, 9.560021739130436, 11.052461538461543, 6.608289130434783), # 18
(4.274768182372451, 11.148107267554012, 12.053361026313912, 9.58354476147343, 11.088645032051284, 6.604818032910629), # 19
(4.297036569602966, 11.204864179994388, 12.079176931590974, 9.60523852657005, 11.122035256410259, 6.601283212560387), # 20
(4.318352238805971, 11.257914311079544, 12.102738793380466, 9.625061820652174, 11.152564903846153, 6.597684986413044), # 21
(4.338685972100283, 11.307145173961842, 12.124003499000287, 9.642973429951692, 11.180166666666667, 6.5940236714975855), # 22
(4.358008551604722, 11.352444281793632, 12.142927935768354, 9.658932140700484, 11.204773237179488, 6.590299584842997), # 23
(4.3762907594381035, 11.393699147727272, 12.159468991002571, 9.672896739130437, 11.226317307692307, 6.586513043478261), # 24
(4.393503377719247, 11.430797284915124, 12.173583552020853, 9.684826011473431, 11.244731570512819, 6.582664364432368), # 25
(4.409617188566969, 11.46362620650954, 12.185228506141103, 9.694678743961353, 11.259948717948719, 6.5787538647343), # 26
(4.424602974100088, 11.492073425662877, 12.194360740681233, 9.702413722826089, 11.271901442307694, 6.574781861413045), # 27
(4.438431516437421, 11.516026455527497, 12.200937142959157, 9.707989734299519, 11.280522435897437, 6.570748671497586), # 28
(4.4510735976977855, 11.535372809255753, 12.204914600292774, 9.711365564613528, 11.285744391025641, 6.566654612016909), # 29
(4.4625, 11.55, 12.20625, 9.7125, 11.287500000000001, 6.562500000000001), # 30
(4.47319183983376, 11.56215031960227, 12.205248928140096, 9.712295118464054, 11.286861125886526, 6.556726763701484), # 31
(4.4836528452685425, 11.574140056818184, 12.202274033816424, 9.711684477124184, 11.28495815602837, 6.547834661835751), # 32
(4.493887715792838, 11.585967720170455, 12.197367798913046, 9.710674080882354, 11.281811569148937, 6.535910757121439), # 33
(4.503901150895141, 11.597631818181819, 12.19057270531401, 9.709269934640524, 11.277441843971632, 6.521042112277196), # 34
(4.513697850063939, 11.609130859374998, 12.181931234903383, 9.707478043300654, 11.27186945921986, 6.503315790021656), # 35
(4.523282512787724, 11.62046335227273, 12.171485869565219, 9.705304411764708, 11.265114893617023, 6.482818853073463), # 36
(4.532659838554988, 11.631627805397729, 12.159279091183576, 9.70275504493464, 11.257198625886524, 6.4596383641512585), # 37
(4.5418345268542195, 11.642622727272729, 12.145353381642513, 9.699835947712419, 11.248141134751775, 6.433861385973679), # 38
(4.5508112771739135, 11.653446626420456, 12.129751222826087, 9.696553125000001, 11.23796289893617, 6.40557498125937), # 39
(4.559594789002558, 11.664098011363638, 12.11251509661836, 9.692912581699348, 11.22668439716312, 6.37486621272697), # 40
(4.568189761828645, 11.674575390625, 12.093687484903382, 9.68892032271242, 11.214326108156028, 6.34182214309512), # 41
(4.576600895140665, 11.684877272727276, 12.07331086956522, 9.684582352941177, 11.2009085106383, 6.3065298350824595), # 42
(4.584832888427111, 11.69500216619318, 12.051427732487923, 9.679904677287583, 11.186452083333334, 6.26907635140763), # 43
(4.592890441176471, 11.704948579545455, 12.028080555555556, 9.674893300653595, 11.17097730496454, 6.229548754789272), # 44
(4.600778252877237, 11.714715021306818, 12.003311820652177, 9.669554227941177, 11.15450465425532, 6.188034107946028), # 45
(4.6085010230179035, 11.724300000000003, 11.97716400966184, 9.663893464052288, 11.137054609929079, 6.144619473596536), # 46
(4.616063451086957, 11.733702024147728, 11.9496796044686, 9.65791701388889, 11.118647650709221, 6.099391914459438), # 47
(4.623470236572891, 11.742919602272728, 11.920901086956523, 9.651630882352942, 11.099304255319149, 6.052438493253375), # 48
(4.630726078964194, 11.751951242897727, 11.890870939009663, 9.645041074346407, 11.079044902482272, 6.003846272696985), # 49
(4.6378356777493615, 11.760795454545454, 11.85963164251208, 9.638153594771243, 11.057890070921987, 5.953702315508913), # 50
(4.6448037324168805, 11.769450745738636, 11.827225679347826, 9.630974448529413, 11.035860239361703, 5.902093684407797), # 51
(4.651634942455243, 11.777915625, 11.793695531400965, 9.623509640522876, 11.012975886524824, 5.849107442112278), # 52
(4.658334007352941, 11.786188600852274, 11.759083680555555, 9.615765175653596, 10.989257491134753, 5.794830651340996), # 53
(4.6649056265984665, 11.79426818181818, 11.723432608695653, 9.60774705882353, 10.964725531914894, 5.739350374812594), # 54
(4.671354499680307, 11.802152876420456, 11.686784797705313, 9.599461294934642, 10.939400487588653, 5.682753675245711), # 55
(4.677685326086957, 11.809841193181818, 11.649182729468599, 9.59091388888889, 10.913302836879433, 5.625127615358988), # 56
(4.683902805306906, 11.817331640625003, 11.610668885869565, 9.582110845588236, 10.886453058510638, 5.566559257871065), # 57
(4.690011636828645, 11.824622727272727, 11.57128574879227, 9.573058169934642, 10.858871631205675, 5.507135665500583), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_arriving_acc = (
(3, 6, 4, 4, 3, 0, 2, 7, 3, 3, 0, 0), # 0
(6, 13, 13, 6, 4, 0, 5, 19, 4, 9, 2, 0), # 1
(9, 23, 19, 9, 4, 0, 11, 25, 11, 14, 2, 0), # 2
(14, 29, 31, 15, 4, 0, 19, 34, 14, 19, 4, 0), # 3
(17, 39, 35, 16, 5, 0, 28, 42, 19, 22, 4, 0), # 4
(21, 47, 38, 22, 7, 0, 33, 49, 26, 26, 4, 0), # 5
(24, 55, 47, 27, 11, 0, 37, 59, 36, 31, 7, 0), # 6
(29, 61, 53, 30, 12, 0, 43, 64, 45, 37, 8, 0), # 7
(34, 68, 61, 34, 13, 0, 48, 75, 52, 39, 10, 0), # 8
(39, 77, 71, 36, 14, 0, 52, 83, 57, 42, 12, 0), # 9
(43, 84, 82, 37, 14, 0, 56, 95, 63, 48, 15, 0), # 10
(50, 98, 85, 40, 22, 0, 62, 103, 72, 49, 18, 0), # 11
(55, 106, 86, 46, 23, 0, 67, 105, 79, 57, 18, 0), # 12
(56, 116, 94, 50, 29, 0, 76, 111, 89, 59, 19, 0), # 13
(60, 124, 107, 54, 32, 0, 81, 125, 97, 66, 21, 0), # 14
(61, 136, 119, 58, 34, 0, 90, 132, 101, 73, 23, 0), # 15
(64, 144, 125, 59, 35, 0, 95, 139, 105, 79, 24, 0), # 16
(68, 155, 133, 60, 35, 0, 103, 151, 107, 83, 26, 0), # 17
(71, 164, 138, 63, 38, 0, 110, 162, 113, 86, 27, 0), # 18
(72, 171, 150, 69, 40, 0, 118, 174, 123, 93, 31, 0), # 19
(75, 181, 158, 73, 41, 0, 124, 184, 130, 98, 34, 0), # 20
(80, 194, 167, 76, 44, 0, 132, 195, 134, 101, 38, 0), # 21
(84, 202, 172, 78, 45, 0, 139, 201, 143, 104, 40, 0), # 22
(90, 217, 178, 85, 48, 0, 143, 213, 147, 110, 41, 0), # 23
(93, 229, 187, 90, 50, 0, 154, 225, 158, 114, 42, 0), # 24
(95, 239, 190, 95, 52, 0, 156, 236, 166, 128, 47, 0), # 25
(97, 247, 201, 98, 54, 0, 160, 247, 170, 129, 49, 0), # 26
(99, 258, 210, 102, 58, 0, 167, 258, 181, 130, 52, 0), # 27
(104, 269, 218, 105, 60, 0, 170, 269, 187, 135, 57, 0), # 28
(107, 281, 222, 107, 61, 0, 177, 276, 193, 145, 57, 0), # 29
(111, 293, 228, 107, 63, 0, 180, 288, 196, 153, 59, 0), # 30
(116, 300, 239, 109, 67, 0, 185, 293, 208, 158, 61, 0), # 31
(122, 311, 243, 113, 70, 0, 191, 300, 215, 163, 62, 0), # 32
(131, 323, 250, 117, 73, 0, 199, 312, 223, 170, 64, 0), # 33
(137, 328, 263, 121, 75, 0, 207, 317, 226, 172, 65, 0), # 34
(139, 333, 272, 126, 76, 0, 211, 330, 232, 179, 67, 0), # 35
(142, 342, 278, 135, 79, 0, 220, 338, 241, 181, 69, 0), # 36
(146, 351, 288, 138, 79, 0, 229, 344, 246, 185, 73, 0), # 37
(156, 363, 298, 141, 81, 0, 237, 359, 250, 189, 74, 0), # 38
(159, 374, 300, 147, 87, 0, 244, 369, 259, 192, 78, 0), # 39
(160, 386, 308, 151, 91, 0, 248, 375, 265, 195, 79, 0), # 40
(164, 399, 315, 155, 94, 0, 254, 390, 272, 198, 81, 0), # 41
(171, 407, 324, 156, 96, 0, 260, 399, 282, 201, 83, 0), # 42
(174, 421, 333, 161, 100, 0, 264, 406, 285, 206, 85, 0), # 43
(180, 433, 341, 167, 103, 0, 269, 419, 288, 211, 88, 0), # 44
(182, 444, 349, 168, 105, 0, 273, 427, 292, 216, 89, 0), # 45
(185, 455, 357, 175, 107, 0, 279, 434, 299, 218, 91, 0), # 46
(189, 467, 366, 181, 111, 0, 286, 445, 304, 222, 95, 0), # 47
(193, 477, 372, 183, 114, 0, 291, 455, 308, 227, 98, 0), # 48
(195, 484, 380, 191, 115, 0, 299, 463, 313, 233, 102, 0), # 49
(201, 492, 380, 195, 116, 0, 304, 468, 320, 238, 106, 0), # 50
(204, 502, 389, 196, 118, 0, 309, 474, 324, 241, 111, 0), # 51
(209, 507, 393, 199, 120, 0, 310, 492, 330, 245, 111, 0), # 52
(212, 517, 403, 202, 123, 0, 321, 500, 334, 250, 112, 0), # 53
(217, 520, 412, 206, 123, 0, 325, 510, 342, 254, 118, 0), # 54
(221, 525, 418, 212, 124, 0, 334, 516, 351, 264, 120, 0), # 55
(224, 534, 423, 215, 127, 0, 343, 522, 354, 269, 122, 0), # 56
(228, 545, 433, 218, 129, 0, 349, 535, 360, 273, 128, 0), # 57
(229, 561, 446, 220, 130, 0, 353, 545, 368, 279, 131, 0), # 58
(229, 561, 446, 220, 130, 0, 353, 545, 368, 279, 131, 0), # 59
)
passenger_arriving_rate = (
(3.7095121817383676, 7.612035984848484, 6.715158258354756, 3.5483152173913037, 2.000048076923077, 0.0, 6.659510869565219, 8.000192307692307, 5.322472826086956, 4.476772172236504, 1.903008996212121, 0.0), # 0
(3.7443308140669203, 7.696686590558361, 6.751429051520996, 3.5680760567632848, 2.0150386217948717, 0.0, 6.657240994867151, 8.060154487179487, 5.352114085144928, 4.500952701013997, 1.9241716476395903, 0.0), # 1
(3.7787518681104277, 7.780081571268237, 6.786838903170522, 3.58740193236715, 2.0297128205128203, 0.0, 6.654901690821256, 8.118851282051281, 5.381102898550726, 4.524559268780347, 1.9450203928170593, 0.0), # 2
(3.8127461259877085, 7.8621309375, 6.821361945694087, 3.6062763586956517, 2.044057211538462, 0.0, 6.652493274456523, 8.176228846153847, 5.409414538043478, 4.547574630462725, 1.965532734375, 0.0), # 3
(3.8462843698175795, 7.942744699775533, 6.854972311482434, 3.624682850241546, 2.0580583333333333, 0.0, 6.6500160628019325, 8.232233333333333, 5.437024275362319, 4.569981540988289, 1.9856861749438832, 0.0), # 4
(3.879337381718857, 8.021832868616723, 6.887644132926307, 3.6426049214975844, 2.0717027243589743, 0.0, 6.647470372886473, 8.286810897435897, 5.463907382246377, 4.591762755284204, 2.005458217154181, 0.0), # 5
(3.9118759438103607, 8.099305454545455, 6.919351542416455, 3.660026086956522, 2.084976923076923, 0.0, 6.644856521739131, 8.339907692307692, 5.490039130434783, 4.612901028277636, 2.0248263636363637, 0.0), # 6
(3.943870838210907, 8.175072468083613, 6.950068672343615, 3.6769298611111116, 2.0978674679487184, 0.0, 6.64217482638889, 8.391469871794873, 5.515394791666668, 4.633379114895743, 2.043768117020903, 0.0), # 7
(3.975292847039314, 8.249043919753085, 6.979769655098544, 3.693299758454106, 2.1103608974358976, 0.0, 6.639425603864735, 8.44144358974359, 5.5399496376811594, 4.653179770065696, 2.062260979938271, 0.0), # 8
(4.006112752414399, 8.321129820075758, 7.00842862307198, 3.709119293478261, 2.12244375, 0.0, 6.636609171195653, 8.489775, 5.563678940217391, 4.672285748714653, 2.0802824550189394, 0.0), # 9
(4.03630133645498, 8.391240179573513, 7.03601970865467, 3.724371980676329, 2.134102564102564, 0.0, 6.633725845410628, 8.536410256410257, 5.586557971014494, 4.690679805769779, 2.0978100448933783, 0.0), # 10
(4.065829381279876, 8.459285008768239, 7.06251704423736, 3.739041334541063, 2.145323878205128, 0.0, 6.630775943538648, 8.581295512820512, 5.608562001811595, 4.70834469615824, 2.1148212521920597, 0.0), # 11
(4.094667669007903, 8.525174318181818, 7.087894762210797, 3.7531108695652167, 2.156094230769231, 0.0, 6.627759782608695, 8.624376923076923, 5.6296663043478254, 4.725263174807198, 2.1312935795454546, 0.0), # 12
(4.122786981757876, 8.58881811833614, 7.112126994965724, 3.766564100241546, 2.1664001602564102, 0.0, 6.624677679649759, 8.665600641025641, 5.649846150362319, 4.741417996643816, 2.147204529584035, 0.0), # 13
(4.15015810164862, 8.650126419753088, 7.135187874892886, 3.779384541062801, 2.1762282051282047, 0.0, 6.621529951690821, 8.704912820512819, 5.669076811594202, 4.756791916595257, 2.162531604938272, 0.0), # 14
(4.1767518107989465, 8.709009232954545, 7.157051534383032, 3.7915557065217387, 2.1855649038461538, 0.0, 6.618316915760871, 8.742259615384615, 5.6873335597826085, 4.771367689588688, 2.177252308238636, 0.0), # 15
(4.202538891327675, 8.7653765684624, 7.177692105826908, 3.803061111111111, 2.194396794871795, 0.0, 6.61503888888889, 8.77758717948718, 5.7045916666666665, 4.785128070551272, 2.1913441421156, 0.0), # 16
(4.227490125353625, 8.81913843679854, 7.197083721615253, 3.8138842693236716, 2.202710416666667, 0.0, 6.611696188103866, 8.810841666666668, 5.720826403985508, 4.798055814410168, 2.204784609199635, 0.0), # 17
(4.25157629499561, 8.870204848484848, 7.215200514138818, 3.824008695652174, 2.2104923076923084, 0.0, 6.608289130434783, 8.841969230769234, 5.736013043478262, 4.810133676092545, 2.217551212121212, 0.0), # 18
(4.274768182372451, 8.918485814043208, 7.232016615788346, 3.8334179045893717, 2.2177290064102566, 0.0, 6.604818032910629, 8.870916025641026, 5.750126856884058, 4.8213444105255645, 2.229621453510802, 0.0), # 19
(4.297036569602966, 8.96389134399551, 7.247506158954584, 3.8420954106280196, 2.2244070512820517, 0.0, 6.601283212560387, 8.897628205128207, 5.76314311594203, 4.831670772636389, 2.2409728359988774, 0.0), # 20
(4.318352238805971, 9.006331448863634, 7.261643276028279, 3.8500247282608693, 2.2305129807692303, 0.0, 6.597684986413044, 8.922051923076921, 5.775037092391305, 4.841095517352186, 2.2515828622159084, 0.0), # 21
(4.338685972100283, 9.045716139169473, 7.274402099400172, 3.8571893719806765, 2.2360333333333333, 0.0, 6.5940236714975855, 8.944133333333333, 5.785784057971015, 4.849601399600115, 2.2614290347923682, 0.0), # 22
(4.358008551604722, 9.081955425434906, 7.285756761461012, 3.8635728562801934, 2.2409546474358972, 0.0, 6.590299584842997, 8.963818589743589, 5.79535928442029, 4.857171174307341, 2.2704888563587264, 0.0), # 23
(4.3762907594381035, 9.114959318181818, 7.295681394601543, 3.869158695652174, 2.2452634615384612, 0.0, 6.586513043478261, 8.981053846153845, 5.803738043478262, 4.863787596401028, 2.2787398295454544, 0.0), # 24
(4.393503377719247, 9.1446378279321, 7.304150131212511, 3.8739304045893723, 2.2489463141025636, 0.0, 6.582664364432368, 8.995785256410255, 5.810895606884059, 4.869433420808341, 2.286159456983025, 0.0), # 25
(4.409617188566969, 9.17090096520763, 7.311137103684661, 3.8778714975845405, 2.2519897435897436, 0.0, 6.5787538647343, 9.007958974358974, 5.816807246376811, 4.874091402456441, 2.2927252413019077, 0.0), # 26
(4.424602974100088, 9.193658740530301, 7.31661644440874, 3.880965489130435, 2.2543802884615385, 0.0, 6.574781861413045, 9.017521153846154, 5.821448233695653, 4.877744296272493, 2.2984146851325753, 0.0), # 27
(4.438431516437421, 9.212821164421996, 7.320562285775494, 3.8831958937198072, 2.256104487179487, 0.0, 6.570748671497586, 9.024417948717948, 5.824793840579711, 4.8803748571836625, 2.303205291105499, 0.0), # 28
(4.4510735976977855, 9.228298247404602, 7.322948760175664, 3.884546225845411, 2.257148878205128, 0.0, 6.566654612016909, 9.028595512820512, 5.826819338768117, 4.881965840117109, 2.3070745618511506, 0.0), # 29
(4.4625, 9.24, 7.32375, 3.885, 2.2575000000000003, 0.0, 6.562500000000001, 9.030000000000001, 5.8275, 4.8825, 2.31, 0.0), # 30
(4.47319183983376, 9.249720255681815, 7.323149356884057, 3.884918047385621, 2.257372225177305, 0.0, 6.556726763701484, 9.02948890070922, 5.827377071078432, 4.882099571256038, 2.312430063920454, 0.0), # 31
(4.4836528452685425, 9.259312045454546, 7.3213644202898545, 3.884673790849673, 2.2569916312056737, 0.0, 6.547834661835751, 9.027966524822695, 5.82701068627451, 4.880909613526569, 2.3148280113636366, 0.0), # 32
(4.493887715792838, 9.268774176136363, 7.3184206793478275, 3.8842696323529413, 2.2563623138297872, 0.0, 6.535910757121439, 9.025449255319149, 5.826404448529412, 4.878947119565218, 2.3171935440340907, 0.0), # 33
(4.503901150895141, 9.278105454545454, 7.314343623188405, 3.8837079738562093, 2.2554883687943263, 0.0, 6.521042112277196, 9.021953475177305, 5.825561960784314, 4.876229082125604, 2.3195263636363634, 0.0), # 34
(4.513697850063939, 9.287304687499997, 7.3091587409420296, 3.882991217320261, 2.2543738918439717, 0.0, 6.503315790021656, 9.017495567375887, 5.824486825980392, 4.872772493961353, 2.3218261718749993, 0.0), # 35
(4.523282512787724, 9.296370681818182, 7.302891521739131, 3.8821217647058828, 2.253022978723404, 0.0, 6.482818853073463, 9.012091914893617, 5.823182647058824, 4.868594347826087, 2.3240926704545455, 0.0), # 36
(4.532659838554988, 9.305302244318183, 7.295567454710145, 3.881102017973856, 2.2514397251773044, 0.0, 6.4596383641512585, 9.005758900709218, 5.821653026960784, 4.86371163647343, 2.3263255610795457, 0.0), # 37
(4.5418345268542195, 9.314098181818181, 7.287212028985508, 3.8799343790849674, 2.249628226950355, 0.0, 6.433861385973679, 8.99851290780142, 5.819901568627452, 4.858141352657005, 2.3285245454545453, 0.0), # 38
(4.5508112771739135, 9.322757301136363, 7.277850733695652, 3.87862125, 2.247592579787234, 0.0, 6.40557498125937, 8.990370319148935, 5.817931875, 4.8519004891304345, 2.330689325284091, 0.0), # 39
(4.559594789002558, 9.33127840909091, 7.267509057971015, 3.8771650326797387, 2.245336879432624, 0.0, 6.37486621272697, 8.981347517730496, 5.815747549019608, 4.845006038647344, 2.3328196022727274, 0.0), # 40
(4.568189761828645, 9.3396603125, 7.256212490942029, 3.8755681290849675, 2.2428652216312055, 0.0, 6.34182214309512, 8.971460886524822, 5.813352193627452, 4.837474993961353, 2.334915078125, 0.0), # 41
(4.576600895140665, 9.34790181818182, 7.2439865217391315, 3.8738329411764707, 2.2401817021276598, 0.0, 6.3065298350824595, 8.960726808510639, 5.810749411764706, 4.829324347826088, 2.336975454545455, 0.0), # 42
(4.584832888427111, 9.356001732954544, 7.230856639492753, 3.8719618709150327, 2.2372904166666667, 0.0, 6.26907635140763, 8.949161666666667, 5.80794280637255, 4.820571092995169, 2.339000433238636, 0.0), # 43
(4.592890441176471, 9.363958863636363, 7.216848333333333, 3.8699573202614377, 2.2341954609929076, 0.0, 6.229548754789272, 8.93678184397163, 5.804935980392157, 4.811232222222222, 2.3409897159090907, 0.0), # 44
(4.600778252877237, 9.371772017045453, 7.201987092391306, 3.8678216911764705, 2.230900930851064, 0.0, 6.188034107946028, 8.923603723404256, 5.801732536764706, 4.80132472826087, 2.3429430042613633, 0.0), # 45
(4.6085010230179035, 9.379440000000002, 7.186298405797103, 3.8655573856209147, 2.2274109219858156, 0.0, 6.144619473596536, 8.909643687943262, 5.798336078431372, 4.790865603864735, 2.3448600000000006, 0.0), # 46
(4.616063451086957, 9.386961619318182, 7.16980776268116, 3.8631668055555552, 2.223729530141844, 0.0, 6.099391914459438, 8.894918120567375, 5.794750208333333, 4.77987184178744, 2.3467404048295455, 0.0), # 47
(4.623470236572891, 9.394335681818182, 7.152540652173913, 3.8606523529411763, 2.21986085106383, 0.0, 6.052438493253375, 8.87944340425532, 5.790978529411765, 4.7683604347826085, 2.3485839204545456, 0.0), # 48
(4.630726078964194, 9.401560994318181, 7.134522563405797, 3.8580164297385626, 2.2158089804964543, 0.0, 6.003846272696985, 8.863235921985817, 5.787024644607844, 4.7563483756038645, 2.3503902485795454, 0.0), # 49
(4.6378356777493615, 9.408636363636361, 7.115778985507247, 3.8552614379084966, 2.211578014184397, 0.0, 5.953702315508913, 8.846312056737588, 5.782892156862745, 4.743852657004831, 2.3521590909090904, 0.0), # 50
(4.6448037324168805, 9.415560596590907, 7.096335407608696, 3.852389779411765, 2.2071720478723407, 0.0, 5.902093684407797, 8.828688191489363, 5.778584669117648, 4.73089027173913, 2.353890149147727, 0.0), # 51
(4.651634942455243, 9.4223325, 7.0762173188405795, 3.84940385620915, 2.2025951773049646, 0.0, 5.849107442112278, 8.810380709219858, 5.774105784313726, 4.717478212560386, 2.355583125, 0.0), # 52
(4.658334007352941, 9.428950880681818, 7.055450208333333, 3.8463060702614382, 2.1978514982269504, 0.0, 5.794830651340996, 8.791405992907801, 5.769459105392158, 4.703633472222222, 2.3572377201704544, 0.0), # 53
(4.6649056265984665, 9.435414545454544, 7.034059565217391, 3.843098823529412, 2.192945106382979, 0.0, 5.739350374812594, 8.771780425531915, 5.764648235294119, 4.689373043478261, 2.358853636363636, 0.0), # 54
(4.671354499680307, 9.441722301136364, 7.012070878623187, 3.8397845179738566, 2.1878800975177306, 0.0, 5.682753675245711, 8.751520390070922, 5.759676776960785, 4.674713919082125, 2.360430575284091, 0.0), # 55
(4.677685326086957, 9.447872954545453, 6.989509637681159, 3.8363655555555556, 2.1826605673758865, 0.0, 5.625127615358988, 8.730642269503546, 5.754548333333334, 4.65967309178744, 2.361968238636363, 0.0), # 56
(4.683902805306906, 9.453865312500001, 6.966401331521738, 3.832844338235294, 2.1772906117021273, 0.0, 5.566559257871065, 8.70916244680851, 5.749266507352941, 4.644267554347826, 2.3634663281250003, 0.0), # 57
(4.690011636828645, 9.459698181818181, 6.942771449275362, 3.8292232679738563, 2.1717743262411346, 0.0, 5.507135665500583, 8.687097304964539, 5.743834901960785, 4.628514299516908, 2.3649245454545453, 0.0), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_allighting_rate = (
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 0
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 1
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 2
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 3
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 4
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 5
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 6
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 7
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 8
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 9
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 10
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 11
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 12
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 13
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 14
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 15
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 16
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 17
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 18
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 19
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 20
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 21
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 22
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 23
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 24
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 25
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 26
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 27
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 28
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 29
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 30
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 31
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 32
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 33
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 34
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 35
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 36
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 37
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 38
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 39
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 40
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 41
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 42
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 43
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 44
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 45
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 46
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 47
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 48
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 49
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 50
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 51
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 52
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 53
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 54
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 55
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 56
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 57
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 58
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 59
)
"""
parameters for reproducibiliy. More information: https://numpy.org/doc/stable/reference/random/parallel.html
"""
#initial entropy
entropy = 258194110137029475889902652135037600173
#index for seed sequence child
child_seed_index = (
1, # 0
5, # 1
)
| 113.110448
| 212
| 0.729125
| 5,147
| 37,892
| 5.36565
| 0.229648
| 0.312851
| 0.247674
| 0.469276
| 0.329724
| 0.328204
| 0.327769
| 0.327769
| 0.327769
| 0.327769
| 0
| 0.819042
| 0.119128
| 37,892
| 334
| 213
| 113.449102
| 0.008359
| 0.031959
| 0
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.015823
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0486e5b4baeb456901789a5f4929aaed8f07c550
| 440
|
py
|
Python
|
getDomainAge/tests/test_handlers/test_log.py
|
ljnath/getDomainAge
|
a15337433d319597c1a705b49553e31620e00058
|
[
"MIT"
] | 2
|
2020-03-12T14:43:19.000Z
|
2021-08-02T06:21:52.000Z
|
getDomainAge/tests/test_handlers/test_log.py
|
ljnath/getDomainAge
|
a15337433d319597c1a705b49553e31620e00058
|
[
"MIT"
] | 2
|
2021-05-25T16:01:29.000Z
|
2021-09-07T19:17:01.000Z
|
getDomainAge/tests/test_handlers/test_log.py
|
ljnath/getDomainAge
|
a15337433d319597c1a705b49553e31620e00058
|
[
"MIT"
] | null | null | null |
from getDomainAge.handlers.log import LogHandler
def test_duplicate_logger():
logger_1 = LogHandler().get_logger('test-logger', 'test.log')
logger_2 = LogHandler().get_logger('test-logger', 'test.log')
assert logger_1 == logger_2
def test_unique_logger():
logger_1 = LogHandler().get_logger('test-logger-1', 'test.log')
logger_2 = LogHandler().get_logger('test-logger-2', 'test.log')
assert logger_1 != logger_2
| 31.428571
| 67
| 0.713636
| 62
| 440
| 4.806452
| 0.241935
| 0.201342
| 0.255034
| 0.308725
| 0.751678
| 0.751678
| 0.751678
| 0.57047
| 0.288591
| 0
| 0
| 0.026316
| 0.136364
| 440
| 13
| 68
| 33.846154
| 0.757895
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
04895808db64e097b6ae73f300c11279646d7a43
| 171
|
py
|
Python
|
tests/conftest.py
|
gvinet/pynfcreader
|
e0bcd1151fcf3ad02191b0ad0ec0dc5cb6c00ef5
|
[
"Apache-2.0"
] | 9
|
2015-05-05T21:46:52.000Z
|
2022-01-25T20:47:31.000Z
|
tests/conftest.py
|
gvinet/pynfcreader
|
e0bcd1151fcf3ad02191b0ad0ec0dc5cb6c00ef5
|
[
"Apache-2.0"
] | 2
|
2018-01-11T02:03:20.000Z
|
2020-06-01T16:32:23.000Z
|
tests/conftest.py
|
gvinet/pynfcreader
|
e0bcd1151fcf3ad02191b0ad0ec0dc5cb6c00ef5
|
[
"Apache-2.0"
] | 1
|
2016-08-17T22:35:53.000Z
|
2016-08-17T22:35:53.000Z
|
import pytest
from pynfcreader.devices.hydra_nfc_v2 import HydraNFCv2
@pytest.fixture
def hydranfc_connection():
return HydraNFCv2(port="/dev/ttyACM0", debug=False)
| 21.375
| 55
| 0.80117
| 22
| 171
| 6.090909
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026144
| 0.105263
| 171
| 7
| 56
| 24.428571
| 0.849673
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
04c40f10f85b7fd737729a587b95c68e80435a29
| 7,030
|
py
|
Python
|
tests/cache/test_cache_file.py
|
obendidi/httpx-cache
|
897dd8da5bb377ed7f61b367716976bdc0d581b1
|
[
"BSD-3-Clause"
] | 16
|
2021-12-13T01:27:44.000Z
|
2022-02-28T02:58:46.000Z
|
tests/cache/test_cache_file.py
|
obendidi/httpx-cache
|
897dd8da5bb377ed7f61b367716976bdc0d581b1
|
[
"BSD-3-Clause"
] | 23
|
2022-01-03T15:57:39.000Z
|
2022-03-28T22:25:08.000Z
|
tests/cache/test_cache_file.py
|
obendidi/httpx-cache
|
897dd8da5bb377ed7f61b367716976bdc0d581b1
|
[
"BSD-3-Clause"
] | 2
|
2022-01-21T17:57:19.000Z
|
2022-01-21T18:18:47.000Z
|
from pathlib import Path
import anyio
import httpx
import mock
import pytest
import httpx_cache
pytestmark = pytest.mark.anyio
testcases = [
httpx_cache.BytesJsonSerializer(),
httpx_cache.MsgPackSerializer(),
]
testids = [
"BytesJsonSerializer",
"MsgPackSerializer",
]
def test_file_cache_init_bad_serializer():
with pytest.raises(TypeError):
httpx_cache.FileCache(serializer="Serial")
@mock.patch.object(Path, "mkdir")
def test_file_cache_init_default_cache_dir(mock_mkdir: mock.MagicMock):
cache = httpx_cache.FileCache()
default_cache_dir = Path.home() / ".cache/httpx-cache"
assert cache.cache_dir == default_cache_dir
mock_mkdir.assert_called_once_with(exist_ok=True)
@mock.patch.object(Path, "mkdir")
def test_file_cache_init_bad_custom_path_cache_dir(mock_mkdir: mock.MagicMock):
cache_dir = Path("./some-path")
cache = httpx_cache.FileCache(cache_dir=cache_dir)
assert cache.cache_dir == cache_dir
mock_mkdir.assert_called_once_with(exist_ok=True)
@mock.patch.object(Path, "mkdir")
def test_file_cache_init_bad_custom_str_cache_dir(mock_mkdir: mock.MagicMock):
cache_dir = "./some-path"
cache = httpx_cache.FileCache(cache_dir=cache_dir)
assert isinstance(cache.cache_dir, Path)
assert cache.cache_dir == Path(cache_dir)
mock_mkdir.assert_called_once_with(exist_ok=True)
@mock.patch.object(Path, "mkdir", new=lambda *args, **kwargs: None)
@mock.patch.object(Path, "is_file", return_value=False)
def test_file_cache_get_not_found(
mock_is_file: mock.MagicMock,
file_cache: httpx_cache.FileCache,
httpx_request: httpx.Request,
):
cached = file_cache.get(httpx_request)
mock_is_file.assert_called_once_with()
assert cached is None
@mock.patch.object(Path, "mkdir", new=lambda *args, **kwargs: None)
@mock.patch.object(anyio.Path, "is_file", return_value=False)
async def test_file_cache_aget_not_found(
mock_is_file: mock.AsyncMock,
file_cache: httpx_cache.FileCache,
httpx_request: httpx.Request,
):
cached = await file_cache.aget(httpx_request)
mock_is_file.assert_awaited_once_with()
assert cached is None
def test_file_cache_set_get_delete(
file_cache: httpx_cache.FileCache,
httpx_request: httpx.Request,
httpx_response: httpx.Response,
):
# make sure cache_dir is new and empty
assert len(list(file_cache.cache_dir.glob("**/*"))) == 0
# check again that cache is empty
cached_response = file_cache.get(httpx_request)
assert cached_response is None
# cache a request
file_cache.set(request=httpx_request, response=httpx_response, content=None)
assert len(list(file_cache.cache_dir.glob("**/*"))) == 1
# get the cached response
cached_response = file_cache.get(httpx_request)
assert cached_response is not None
assert cached_response.status_code == httpx_response.status_code
assert cached_response.content == httpx_response.content
assert cached_response.headers == httpx_response.headers
# delete the cached response
file_cache.delete(httpx_request)
assert len(list(file_cache.cache_dir.glob("**/*"))) == 0
# delete with cached file not found
# should do nothing (not raise an error)
file_cache.delete(httpx_request)
file_cache.close()
async def test_file_cache_aset_aget_adelete(
file_cache: httpx_cache.FileCache,
httpx_request: httpx.Request,
httpx_response: httpx.Response,
):
assert len(list(file_cache.cache_dir.glob("**/*"))) == 0
# cache a request
await file_cache.aset(request=httpx_request, response=httpx_response, content=None)
# make sure we have one request inside
assert len(list(file_cache.cache_dir.glob("**/*"))) == 1
# get the cached response
cached_response = await file_cache.aget(httpx_request)
assert cached_response is not None
assert cached_response.status_code == httpx_response.status_code
assert cached_response.content == httpx_response.content
assert cached_response.headers == httpx_response.headers
# delete the cached response
await file_cache.adelete(httpx_request)
assert len(list(file_cache.cache_dir.glob("**/*"))) == 0
await file_cache.aclose()
def test_file_cache_set_get_delete_with_streaming_body(
file_cache: httpx_cache.FileCache,
httpx_request: httpx.Request,
streaming_body,
):
assert len(list(file_cache.cache_dir.glob("**/*"))) == 0
httpx_response = httpx.Response(200, content=streaming_body)
def callback(content: bytes) -> None:
# set it in cache
file_cache.set(request=httpx_request, response=httpx_response, content=content)
# wrap the response stream
httpx_response.stream = httpx_cache.ByteStreamWrapper(
stream=httpx_response.stream, callback=callback # type: ignore
)
# when read the response, it will be cached using the callback
httpx_response.read()
# make sure we have one request inside
assert len(list(file_cache.cache_dir.glob("**/*"))) == 1
# get the cached response
cached_response = file_cache.get(httpx_request)
assert cached_response is not None
assert cached_response.status_code == httpx_response.status_code
assert cached_response.headers == httpx_response.headers
with pytest.raises(httpx.ResponseNotRead):
cached_response.content
assert cached_response.read() == httpx_response.content
# delete the cached response
file_cache.delete(httpx_request)
assert len(list(file_cache.cache_dir.glob("**/*"))) == 0
file_cache.close()
async def test_file_cache_aset_aget_adelete_with_async_streaming_body(
file_cache: httpx_cache.FileCache,
httpx_request: httpx.Request,
async_streaming_body,
):
assert len(list(file_cache.cache_dir.glob("**/*"))) == 0
httpx_response = httpx.Response(200, content=async_streaming_body)
async def callback(content: bytes) -> None:
# set it in cache
await file_cache.aset(
request=httpx_request, response=httpx_response, content=content
)
# wrap the response stream
httpx_response.stream = httpx_cache.ByteStreamWrapper(
stream=httpx_response.stream, callback=callback # type: ignore
)
# when read the response, it will be cached using the callback
await httpx_response.aread()
# make sure we have one request inside
assert len(list(file_cache.cache_dir.glob("**/*"))) == 1
# get the cached response
cached_response = await file_cache.aget(httpx_request)
assert cached_response is not None
assert cached_response.status_code == httpx_response.status_code
assert cached_response.headers == httpx_response.headers
with pytest.raises(httpx.ResponseNotRead):
cached_response.content
assert await cached_response.aread() == httpx_response.content
# delete the cached response
await file_cache.adelete(httpx_request)
assert len(list(file_cache.cache_dir.glob("**/*"))) == 0
await file_cache.aclose()
| 31.524664
| 87
| 0.736273
| 950
| 7,030
| 5.166316
| 0.122105
| 0.08802
| 0.04238
| 0.041565
| 0.847188
| 0.827628
| 0.77771
| 0.751019
| 0.733904
| 0.703749
| 0
| 0.00307
| 0.166003
| 7,030
| 222
| 88
| 31.666667
| 0.834044
| 0.102134
| 0
| 0.56338
| 0
| 0
| 0.026885
| 0
| 0
| 0
| 0
| 0
| 0.28169
| 1
| 0.056338
| false
| 0
| 0.042254
| 0
| 0.098592
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
04d4c2971ed993f941578510a8b411ee81e5347b
| 62
|
py
|
Python
|
pcmdi_metrics/diurnal/__init__.py
|
tomvothecoder/pcmdi_metrics
|
34cdd56a78859db6417cbc7018c8ae8bbf2f09b5
|
[
"BSD-3-Clause"
] | 47
|
2015-03-18T22:44:51.000Z
|
2022-01-30T04:35:05.000Z
|
pcmdi_metrics/diurnal/__init__.py
|
tomvothecoder/pcmdi_metrics
|
34cdd56a78859db6417cbc7018c8ae8bbf2f09b5
|
[
"BSD-3-Clause"
] | 524
|
2015-01-01T04:00:34.000Z
|
2022-03-31T15:06:46.000Z
|
pcmdi_metrics/diurnal/__init__.py
|
tomvothecoder/pcmdi_metrics
|
34cdd56a78859db6417cbc7018c8ae8bbf2f09b5
|
[
"BSD-3-Clause"
] | 30
|
2015-06-05T17:19:43.000Z
|
2021-11-02T15:22:21.000Z
|
from . import common # noqa
from . import fourierFFT # noqa
| 20.666667
| 32
| 0.709677
| 8
| 62
| 5.5
| 0.625
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225806
| 62
| 2
| 33
| 31
| 0.916667
| 0.145161
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f3dbfae542b152c5fff5ade55a6d2c37bc851cab
| 35
|
py
|
Python
|
src/indexa/__init__.py
|
villoro/airflow_tasks
|
81bd892744a9bbbf6e01903649b6c3786a955a5a
|
[
"MIT"
] | null | null | null |
src/indexa/__init__.py
|
villoro/airflow_tasks
|
81bd892744a9bbbf6e01903649b6c3786a955a5a
|
[
"MIT"
] | 4
|
2020-10-09T15:59:09.000Z
|
2020-11-18T08:34:44.000Z
|
src/indexa/__init__.py
|
villoro/airflow_tasks
|
81bd892744a9bbbf6e01903649b6c3786a955a5a
|
[
"MIT"
] | null | null | null |
from .process import update_indexa
| 17.5
| 34
| 0.857143
| 5
| 35
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6d2e304e1f7739c1e86f615b8ec3bd0f858e5919
| 139
|
py
|
Python
|
prive/threat_models/__init__.py
|
alan-turing-institute/privacy-sdg-toolbox
|
cdd61e3b59d2306906a6eaf7910b9c99a19e9e15
|
[
"MIT"
] | 2
|
2022-03-24T15:17:49.000Z
|
2022-03-31T09:21:32.000Z
|
prive/threat_models/__init__.py
|
alan-turing-institute/privacy-sdg-toolbox
|
cdd61e3b59d2306906a6eaf7910b9c99a19e9e15
|
[
"MIT"
] | 1
|
2022-03-31T10:34:48.000Z
|
2022-03-31T10:34:48.000Z
|
prive/threat_models/__init__.py
|
alan-turing-institute/privacy-sdg-toolbox
|
cdd61e3b59d2306906a6eaf7910b9c99a19e9e15
|
[
"MIT"
] | null | null | null |
from .base_classes import ThreatModel, StaticDataThreatModel, InteractiveThreatModel
from .mia import TargetedMIA, TargetedAuxiliaryDataMIA
| 69.5
| 84
| 0.892086
| 12
| 139
| 10.25
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071942
| 139
| 2
| 85
| 69.5
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6d4501d1a8c435d31c967cddaa38c235ad6907f4
| 41
|
py
|
Python
|
fastapi/core/__init__.py
|
ilDug/docker-utils
|
6580e916a8c2c0d91f2e3da52a9d839507569bb7
|
[
"MIT"
] | null | null | null |
fastapi/core/__init__.py
|
ilDug/docker-utils
|
6580e916a8c2c0d91f2e3da52a9d839507569bb7
|
[
"MIT"
] | null | null | null |
fastapi/core/__init__.py
|
ilDug/docker-utils
|
6580e916a8c2c0d91f2e3da52a9d839507569bb7
|
[
"MIT"
] | null | null | null |
from .mail import DagMail, DagMailConfig
| 20.5
| 40
| 0.829268
| 5
| 41
| 6.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 41
| 1
| 41
| 41
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6d584854bc3f9f310bb09ade158cbfef2c7f9972
| 9,179
|
py
|
Python
|
MultiLayerPerceptron.py
|
kumass2020/NeuralNetwork-MultiLayer-Perceptron
|
38a6c33828bafad685c81e213492d30ca24e4dfb
|
[
"MIT"
] | null | null | null |
MultiLayerPerceptron.py
|
kumass2020/NeuralNetwork-MultiLayer-Perceptron
|
38a6c33828bafad685c81e213492d30ca24e4dfb
|
[
"MIT"
] | null | null | null |
MultiLayerPerceptron.py
|
kumass2020/NeuralNetwork-MultiLayer-Perceptron
|
38a6c33828bafad685c81e213492d30ca24e4dfb
|
[
"MIT"
] | null | null | null |
import numpy as np
from math import ceil, floor
def init_network():
# network = {}
# network['']
x1 = np.asfarray([[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1]])
d1 = np.asfarray([1, 0, 0, 0, 0, 0, 0, 0, 0, 0])
x2 = np.asfarray([[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1]])
d2 = np.asfarray([0, 1, 0, 0, 0, 0, 0, 0, 0, 0])
x3 = np.asfarray([[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1]])
d3 = np.asfarray([0, 0, 1, 0, 0, 0, 0, 0, 0, 0])
x4 = np.asfarray([[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1]])
d4 = np.asfarray([0, 0, 0, 1, 0, 0, 0, 0, 0, 0])
x5 = np.asfarray([[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1]])
d5 = np.asfarray([0, 0, 0, 0, 1, 0, 0, 0, 0, 0])
x6 = np.asfarray([[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1]])
d6 = np.asfarray([0, 0, 0, 0, 0, 1, 0, 0, 0, 0])
x7 = np.asfarray([[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 1, 0],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1]])
d7 = np.asfarray([0, 0, 0, 0, 0, 0, 1, 0, 0, 0])
x8 = np.asfarray([[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 1, 1, 0],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1],
[0, 1, 1, 0, 0, 1, 1, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0]])
d8 = np.asfarray([0, 0, 0, 0, 0, 0, 0, 1, 0, 0])
x9 = np.asfarray([[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 1, 1, 0],
[1, 1, 1, 0, 0, 1, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1]])
d9 = np.asfarray([0, 0, 0, 0, 0, 0, 0, 0, 1, 0])
x10 = np.asfarray([[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1]])
d10 = np.asfarray([0, 0, 0, 0, 0, 0, 0, 0, 0, 1])
# 가중치
# W1 = np.full((64, 5), 0.5)
# W2 = np.full((5, 10), 0.5)
W1 = np.random.normal(scale=0.1, size=(64, 5))
W2 = np.random.normal(scale=0.1, size=(5, 10))
# # 은닉층 활성화 함수(Sigmoid) 전, 후
# A = np.asfarray([0, 0, 0, 0, 0])
# Z = np.asfarray([0, 0, 0, 0, 0])
# X: 입력패턴, D: 출력패턴
X = [x1, x2, x3, x4, x5, x6, x7, x8, x9, x10]
D = [d1, d2, d3, d4, d5, d6, d7, d8, d9, d10]
return X, D, W1, W2
def makeNoise(x):
if x == 0:
x = 1
elif x == 1:
x = 0
return x
class Sigmoid:
def __init__(self):
self.out = None
def forward(self, x):
out = 1 / (1 + np.exp(-x))
return out
# 입력층 : 64개
# 은닉층 : 5개
# 출력층 : 10개
sigmoid1 = Sigmoid()
sigmoid2 = Sigmoid()
offset = 0
momentum = 1.0
eta = 0.1
bias1 = [0.0 for i in range(5)]
bias2 = [0.0 for i in range(10)]
# bias1 = [0.5, 1, 1, 1, 1]
# bias2 = [0.5, 1, 1, 1, 1, 1, 1, 1, 1, 1]
X, D, W1, W2 = init_network()
epoch = 0
# 은닉층 오차
delta1 = np.full((10, 5), 0.0)
# 출력층 오차
delta2 = np.full((10, 10), 0.0)
A = [0.0 for i in range(5)]
Z = [0.0 for i in range(5)]
O = np.full((10, 10), 0.0)
tmp = np.full((64, 5), 0.0)
test = X[0]
test = test.flatten()
while epoch < 100001:
print("epoch: " + str(epoch))
for i in range(10): # ㄱ, ㄴ, ㄷ, ...
# print("i: " + str(i))
for k in range(5): # A[0], A[1], ...
# test
test4 = X[i].flatten()
test5 = W1[:, k]
tmp = X[i]
# epoch 두 번마다 노이즈 넣어 데이터 셋 증가 효과
if epoch % 2 == 0:
tmp_x = np.random.randint(0, 8)
tmp_y = np.random.randint(0, 8)
tmp[tmp_x][tmp_y] = makeNoise(tmp[tmp_x][tmp_y])
# 은닉층 업데이트
A[k] = np.dot(tmp.flatten(), W1[:, k]) + bias1[k]
# 은닉층 내에서 활성화 함수(시그모이드) 적용
Z[k] = sigmoid1.forward(A[k])
test3 = Z
for j in range(10):
# print("j: " + str(j))
test1 = np.asfarray(Z).flatten()
test2 = W2[:, j]
# 출력층 업데이트
O[i][j] = np.dot(np.asfarray(Z).flatten(), W2[:, j]) + bias2[j]
# 출력층 내에서 활성화 함수(시그모이드) 적용
O[i][j] = sigmoid2.forward(O[i][j])
test6 = O[i][j]
test7 = 1 - O[i][j]
test8 = (D[i])[j] - O[i][j]
# print("오차:", (D[i])[i] - O[i][j])
delta2[i][j] = O[i][j] * (1 - O[i][j]) * ((D[i])[j] - O[i][j])
# delta2 = D[i] - O[i]
for m in range(5):
# print("m: " + str(m))
summ = 0
for n in range(10):
# print("n: " + str(n))
summ += delta2[i][n] * W2[m][n]
delta1[i][m] = Z[m] * (1 - Z[m]) * summ
# delta1 = np.asfarray(Z) * (1.0 - np.asfarray(Z)) * np.dot(W2.T, delta2)
# 역전파 1
for n in range(5):
for o in range(10):
W2[n][o] = momentum * W2[n][o] + eta * delta2[i][o] * Z[n]
# 역전파 2
for k in range(64):
for j in range(5):
W1[k][j] = momentum * W1[k][j] + eta * delta1[i][j] * (X[i]).flatten()[k]
print()
print(str(O[i]) + " " + str(i))
print("")
epoch += 1
# noise_pattern = np.asfarray([[1, 1, 0, 1, 1, 1, 1, 1],
# [1, 1, 1, 1, 1, 1, 1, 0],
# [0, 0, 1, 0, 0, 0, 1, 1],
# [0, 0, 0, 0, 0, 0, 1, 1],
# [0, 0, 0, 0, 0, 1, 0, 1],
# [0, 0, 0, 0, 0, 0, 1, 1],
# [0, 0, 0, 0, 0, 0, 1, 1],
# [0, 0, 0, 0, 0, 0, 1, 1]])
noise_pattern = np.asfarray([[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1]])
# 학습된 가중치를 기반으로 글자 분류
for k in range(5):
A[k] = np.dot(noise_pattern.flatten(), W1[:, k])
Z[k] = sigmoid1.forward(A[k])
for j in range(10):
O[0][j] = np.dot(np.asfarray(Z).flatten(), W2[:, j])
O[0][j] = sigmoid2.forward(O[0][j])
# 출력 유니트
result = O[0].tolist()
for i in range(10):
result[i] = 1 - result[i]
# for i in range(10):
# result[i] = float(result[i])
pos = result.index(max(result))
if pos == 0:
str1 = "ㄱ" + " - " + str(result[0])
elif pos == 1:
str1 = "ㄴ"
elif pos == 2:
str1 = "ㄷ"
elif pos == 3:
str1 = "ㄹ"
elif pos == 4:
str1 = "ㅁ"
elif pos == 5:
str1 = "ㅂ"
elif pos == 6:
str1 = "ㅅ"
elif pos == 7:
str1 = "ㅇ"
elif pos == 8:
str1 = "ㅈ"
elif pos == 9:
str1 = "ㅋ"
else:
print("error")
print(str(result) + " " + str1)
| 31.221088
| 89
| 0.327705
| 1,634
| 9,179
| 1.831701
| 0.097919
| 0.260608
| 0.309723
| 0.366188
| 0.529235
| 0.490812
| 0.430003
| 0.359506
| 0.349148
| 0.294353
| 0
| 0.225338
| 0.452228
| 9,179
| 293
| 90
| 31.327645
| 0.369928
| 0.126484
| 0
| 0.402913
| 0
| 0
| 0.003387
| 0
| 0.004854
| 0
| 0
| 0
| 0
| 1
| 0.019417
| false
| 0
| 0.009709
| 0
| 0.048544
| 0.029126
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6d5f3dda305c48d4c716daf49f9c876e3423c8e8
| 116
|
py
|
Python
|
Back/ecoreleve_server/GenericObjets/__init__.py
|
NaturalSolutions/NS.Bootstrap
|
c2cc73717dbe769e064c3254a5b20cb16b37bda2
|
[
"MIT"
] | null | null | null |
Back/ecoreleve_server/GenericObjets/__init__.py
|
NaturalSolutions/NS.Bootstrap
|
c2cc73717dbe769e064c3254a5b20cb16b37bda2
|
[
"MIT"
] | null | null | null |
Back/ecoreleve_server/GenericObjets/__init__.py
|
NaturalSolutions/NS.Bootstrap
|
c2cc73717dbe769e064c3254a5b20cb16b37bda2
|
[
"MIT"
] | null | null | null |
from . import ObjectWithDynProp
from . import FrontModules
from .ListObjectWithDynProp import ListObjectWithDynProp
| 29
| 56
| 0.87069
| 10
| 116
| 10.1
| 0.5
| 0.19802
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 116
| 3
| 57
| 38.666667
| 0.971154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ed9112faa09ba1d758751f4bb62a2040e14b22c9
| 158
|
py
|
Python
|
src/test/datascience/serverConfigFiles/remotePassword.py
|
JesterOrNot/vscode-python
|
f51b0bbf0e21bd6f9becc89ebfcb383f8e840e76
|
[
"MIT"
] | 11
|
2019-11-11T20:45:40.000Z
|
2021-05-08T05:51:36.000Z
|
src/test/datascience/serverConfigFiles/remotePassword.py
|
JesterOrNot/vscode-python
|
f51b0bbf0e21bd6f9becc89ebfcb383f8e840e76
|
[
"MIT"
] | 6
|
2021-01-17T20:21:32.000Z
|
2022-02-10T19:22:36.000Z
|
src/test/datascience/serverConfigFiles/remotePassword.py
|
JesterOrNot/vscode-python
|
f51b0bbf0e21bd6f9becc89ebfcb383f8e840e76
|
[
"MIT"
] | 4
|
2020-02-02T02:18:41.000Z
|
2021-07-07T15:31:17.000Z
|
c.NotebookApp.ip = '0.0.0.0'
c.NotebookApp.open_browser = False
# Python
c.NotebookApp.password = 'sha1:74182e119a7b:e1b98bbba98f9ada3fd714eda9652437e80082e2'
| 39.5
| 85
| 0.816456
| 19
| 158
| 6.736842
| 0.631579
| 0.28125
| 0.046875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.063291
| 158
| 4
| 85
| 39.5
| 0.614865
| 0.037975
| 0
| 0
| 0
| 0
| 0.430464
| 0.384106
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
611e76f1061e011445c159af4c6af265cc911c62
| 192
|
py
|
Python
|
azurelinuxagent/pa/deprovision/pexos.py
|
pexip/os-walinuxagent
|
ddc7a02bf4276cc7ec9f6671bc1eafc810a76737
|
[
"Apache-2.0"
] | null | null | null |
azurelinuxagent/pa/deprovision/pexos.py
|
pexip/os-walinuxagent
|
ddc7a02bf4276cc7ec9f6671bc1eafc810a76737
|
[
"Apache-2.0"
] | null | null | null |
azurelinuxagent/pa/deprovision/pexos.py
|
pexip/os-walinuxagent
|
ddc7a02bf4276cc7ec9f6671bc1eafc810a76737
|
[
"Apache-2.0"
] | null | null | null |
class PexOSDeprovisionHandler(object):
def __init__(self):
pass
def run(self, force=False, deluser=False):
return
def run_changed_unique_id(self):
return
| 19.2
| 46
| 0.651042
| 22
| 192
| 5.363636
| 0.681818
| 0.101695
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.265625
| 192
| 9
| 47
| 21.333333
| 0.836879
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.142857
| 0
| 0.285714
| 0.857143
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
b617cf324ec2460ff9fd91f879b07e5c37941ef1
| 35
|
py
|
Python
|
dizoo/procgen/bigfish/envs/__init__.py
|
davide97l/DI-engine
|
d48c93bcd5c07c29f2ce4ac1b7756b8bc255c423
|
[
"Apache-2.0"
] | null | null | null |
dizoo/procgen/bigfish/envs/__init__.py
|
davide97l/DI-engine
|
d48c93bcd5c07c29f2ce4ac1b7756b8bc255c423
|
[
"Apache-2.0"
] | null | null | null |
dizoo/procgen/bigfish/envs/__init__.py
|
davide97l/DI-engine
|
d48c93bcd5c07c29f2ce4ac1b7756b8bc255c423
|
[
"Apache-2.0"
] | null | null | null |
from .bigfish_env import BigfishEnv
| 35
| 35
| 0.885714
| 5
| 35
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 35
| 1
| 35
| 35
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b63119e1f602b1247f6231903504598e48391fc9
| 29
|
py
|
Python
|
projects/ABD_Net/__init__.py
|
Yogurt2019/abd-deep-person-reid
|
47c9b9499bffb937891a896f7b5ab7ce7a8f67c4
|
[
"MIT"
] | null | null | null |
projects/ABD_Net/__init__.py
|
Yogurt2019/abd-deep-person-reid
|
47c9b9499bffb937891a896f7b5ab7ce7a8f67c4
|
[
"MIT"
] | null | null | null |
projects/ABD_Net/__init__.py
|
Yogurt2019/abd-deep-person-reid
|
47c9b9499bffb937891a896f7b5ab7ce7a8f67c4
|
[
"MIT"
] | null | null | null |
from .ABD_components import *
| 29
| 29
| 0.827586
| 4
| 29
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b69891ecdd34621cb44feda34f5eebeeed50cec3
| 21
|
py
|
Python
|
src/__init__.py
|
shanjunUSC/pyflow-alabmod
|
6e2b9b3991b218f65b9ef705eca08d91f412e43b
|
[
"BSD-2-Clause"
] | null | null | null |
src/__init__.py
|
shanjunUSC/pyflow-alabmod
|
6e2b9b3991b218f65b9ef705eca08d91f412e43b
|
[
"BSD-2-Clause"
] | null | null | null |
src/__init__.py
|
shanjunUSC/pyflow-alabmod
|
6e2b9b3991b218f65b9ef705eca08d91f412e43b
|
[
"BSD-2-Clause"
] | null | null | null |
from pyflow import *
| 10.5
| 20
| 0.761905
| 3
| 21
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b69f2bd9ef86883db944c10a1c83e57a64c9f9f8
| 87
|
py
|
Python
|
create_db.py
|
Ilyaivanov60/web_dictionary
|
1cbfd9fd78f4bb59a59d6b3d2f727304b138c01e
|
[
"MIT"
] | null | null | null |
create_db.py
|
Ilyaivanov60/web_dictionary
|
1cbfd9fd78f4bb59a59d6b3d2f727304b138c01e
|
[
"MIT"
] | 1
|
2021-01-14T12:47:50.000Z
|
2021-01-14T12:47:50.000Z
|
create_db.py
|
Ilyaivanov60/web_dictionary
|
1cbfd9fd78f4bb59a59d6b3d2f727304b138c01e
|
[
"MIT"
] | null | null | null |
from webapp import create_app
from webapp.db import db
db.create_all(app=create_app())
| 21.75
| 31
| 0.816092
| 16
| 87
| 4.25
| 0.4375
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 87
| 4
| 31
| 21.75
| 0.871795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b6aa1db8a0196f3b5058ed396ecc6744eae5bb31
| 1,521
|
py
|
Python
|
tifa/apps/admin/gift_card.py
|
twocucao/tifa
|
f703fd27f54000e7d51f06d2456d09cc79e0ab72
|
[
"MIT"
] | 71
|
2020-04-16T04:28:45.000Z
|
2022-03-31T22:45:11.000Z
|
tifa/apps/admin/gift_card.py
|
twocucao/tifa
|
f703fd27f54000e7d51f06d2456d09cc79e0ab72
|
[
"MIT"
] | 6
|
2021-05-13T06:32:38.000Z
|
2022-03-04T01:18:34.000Z
|
tifa/apps/admin/gift_card.py
|
twocucao/tifa
|
f703fd27f54000e7d51f06d2456d09cc79e0ab72
|
[
"MIT"
] | 12
|
2021-05-01T08:43:11.000Z
|
2022-03-29T00:58:54.000Z
|
from fastapi_utils.api_model import APIModel
from tifa.apps.admin.local import g
from tifa.apps.admin.router import bp
from tifa.models.gift_card import GiftCard
class TGiftCard(APIModel):
id: str
name: str
@bp.list("/gift_cards", out=TGiftCard, summary="GiftCard", tags=["GiftCard"])
async def gift_cards_items():
ins = await g.adal.first_or_404(GiftCard)
return {"items": ins}
@bp.item("/gift_card", out=TGiftCard, summary="GiftCard", tags=["GiftCard"])
async def gift_card_item():
ins = await g.adal.first_or_404(GiftCard)
return {"items": ins}
@bp.op("/gift_card/create", out=TGiftCard, summary="GiftCard", tags=["GiftCard"])
async def gift_card_create():
ins = await g.adal.first_or_404(GiftCard)
return {"items": ins}
@bp.op("/gift_card/update", out=TGiftCard, summary="GiftCard", tags=["GiftCard"])
async def gift_card_update():
ins = await g.adal.first_or_404(GiftCard)
return {"items": ins}
@bp.op("/gift_card/delete", out=TGiftCard, summary="GiftCard", tags=["GiftCard"])
async def gift_card_delete():
ins = await g.adal.first_or_404(GiftCard)
return {"items": ins}
@bp.op("/gift_card/activate", out=TGiftCard, summary="GiftCard", tags=["GiftCard"])
async def gift_card_activate():
ins = await g.adal.first_or_404(GiftCard)
return {"items": ins}
@bp.op("/gift_card/deactivate", out=TGiftCard, summary="GiftCard", tags=["GiftCard"])
async def gift_card_deactivate():
ins = await g.adal.first_or_404(GiftCard)
return {"items": ins}
| 28.698113
| 85
| 0.708087
| 222
| 1,521
| 4.68018
| 0.202703
| 0.100096
| 0.128008
| 0.181906
| 0.729548
| 0.729548
| 0.729548
| 0.729548
| 0.729548
| 0.680462
| 0
| 0.015909
| 0.13215
| 1,521
| 52
| 86
| 29.25
| 0.771212
| 0
| 0
| 0.4
| 0
| 0
| 0.170283
| 0.013807
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.114286
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fcaf9f7d4123a89b55dda1d4700d856126c92ce9
| 746
|
py
|
Python
|
py_hcl/firrtl_ir/literal.py
|
LemniscateX/py-hcl
|
352fa8408ad51da2a94cb64270c863b46ef7596b
|
[
"MIT"
] | null | null | null |
py_hcl/firrtl_ir/literal.py
|
LemniscateX/py-hcl
|
352fa8408ad51da2a94cb64270c863b46ef7596b
|
[
"MIT"
] | null | null | null |
py_hcl/firrtl_ir/literal.py
|
LemniscateX/py-hcl
|
352fa8408ad51da2a94cb64270c863b46ef7596b
|
[
"MIT"
] | null | null | null |
from .expression import Expression
from .utils import serialize_str
class UIntLiteral(Expression):
def __init__(self, value, width):
self.value = value
self.width = width
def serialize(self, output):
output.write(b"UInt")
self.width.serialize(output)
output.write(b'("')
output.write(serialize_str(hex(self.value)[2:]))
output.write(b'")')
class SIntLiteral(Expression):
def __init__(self, value, width):
self.value = value
self.width = width
def serialize(self, output):
output.write(b"SInt")
self.width.serialize(output)
output.write(b'("')
output.write(serialize_str(hex(self.value)[2:]))
output.write(b'")')
| 25.724138
| 56
| 0.619303
| 89
| 746
| 5.067416
| 0.235955
| 0.195122
| 0.159645
| 0.159645
| 0.784922
| 0.784922
| 0.784922
| 0.784922
| 0.784922
| 0.784922
| 0
| 0.003546
| 0.243968
| 746
| 28
| 57
| 26.642857
| 0.796099
| 0
| 0
| 0.727273
| 0
| 0
| 0.021448
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0.090909
| 0
| 0.363636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fcf5ebff40c5c809d23960037286fd8c54e1bd60
| 122
|
py
|
Python
|
__init__.py
|
mpcarolin/pokedex-flex-api
|
2ed38792aa53848d4445d66630663b4d32b30815
|
[
"Apache-2.0"
] | 1
|
2020-01-14T02:14:05.000Z
|
2020-01-14T02:14:05.000Z
|
__init__.py
|
mpcarolin/pokedex-flex-api
|
2ed38792aa53848d4445d66630663b4d32b30815
|
[
"Apache-2.0"
] | 2
|
2018-06-02T18:40:59.000Z
|
2020-03-10T00:03:50.000Z
|
__init__.py
|
mpcarolin/pokedex-flex-api
|
2ed38792aa53848d4445d66630663b4d32b30815
|
[
"Apache-2.0"
] | null | null | null |
import config as _config
import api
import os
if not os.path.isdir(_config.CACHE_PATH):
os.mkdir(_config.CACHE_PATH)
| 17.428571
| 41
| 0.778689
| 21
| 122
| 4.285714
| 0.52381
| 0.244444
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139344
| 122
| 6
| 42
| 20.333333
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
1e046a9e32fc3d154a244b245e490d5e2f26cde0
| 40
|
py
|
Python
|
gameplay/core.py
|
apalmer/gameplay
|
08ea189cf102100d6ee056e17103783803d07008
|
[
"MIT"
] | null | null | null |
gameplay/core.py
|
apalmer/gameplay
|
08ea189cf102100d6ee056e17103783803d07008
|
[
"MIT"
] | null | null | null |
gameplay/core.py
|
apalmer/gameplay
|
08ea189cf102100d6ee056e17103783803d07008
|
[
"MIT"
] | null | null | null |
def seagull():
print("kaw kaw mfkz")
| 20
| 25
| 0.625
| 6
| 40
| 4.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 40
| 2
| 25
| 20
| 0.78125
| 0
| 0
| 0
| 0
| 0
| 0.292683
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
1e18442ec50bc15891f0652d1bd494cde1aa1193
| 10,614
|
py
|
Python
|
test/unit/metrics/test_metricframe_smoke.py
|
vladiliescu/fairlearn
|
fee3f28e327ce5c36695d6b589df2d1ed2116136
|
[
"MIT"
] | null | null | null |
test/unit/metrics/test_metricframe_smoke.py
|
vladiliescu/fairlearn
|
fee3f28e327ce5c36695d6b589df2d1ed2116136
|
[
"MIT"
] | null | null | null |
test/unit/metrics/test_metricframe_smoke.py
|
vladiliescu/fairlearn
|
fee3f28e327ce5c36695d6b589df2d1ed2116136
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation and Fairlearn contributors.
# Licensed under the MIT License.
import numpy as np
import pandas as pd
import pytest
import sklearn.metrics as skm
import fairlearn.metrics as metrics
from .data_for_test import y_t, y_p, g_1, g_2, g_3, g_4
from test.unit.input_convertors import conversions_for_1d
@pytest.mark.parametrize("transform_y_p", conversions_for_1d)
@pytest.mark.parametrize("transform_y_t", conversions_for_1d)
def test_basic(transform_y_t, transform_y_p):
# If there are failures here, other, more specific tests should also fail
g_f = pd.DataFrame(data=g_4, columns=['My feature'])
target = metrics.MetricFrame(skm.recall_score,
transform_y_t(y_t),
transform_y_p(y_p),
sensitive_features=g_f)
# Check on the indices properties
assert target.control_levels is None
assert isinstance(target.sensitive_levels, list)
assert (target.sensitive_levels == ['My feature'])
# Check we have correct return types
assert isinstance(target.overall, float)
assert isinstance(target.by_group, pd.Series)
# Check we have expected number of elements
assert len(target.by_group) == 2
assert np.array_equal(target.by_group.index.names, ['My feature'])
recall_overall = skm.recall_score(y_t, y_p)
assert target.overall == recall_overall
mask_p = (g_4 == 'pp')
mask_q = (g_4 == 'q')
recall_p = skm.recall_score(y_t[mask_p], y_p[mask_p])
recall_q = skm.recall_score(y_t[mask_q], y_p[mask_q])
assert target.by_group['pp'] == recall_p
assert target.by_group['q'] == recall_q
target_mins = target.group_min()
assert isinstance(target_mins, float)
assert target_mins == min(recall_p, recall_q)
target_maxes = target.group_max()
assert isinstance(target_mins, float)
assert target_maxes == max(recall_p, recall_q)
@pytest.mark.parametrize("transform_y_p", conversions_for_1d)
@pytest.mark.parametrize("transform_y_t", conversions_for_1d)
def test_basic_metric_dict(transform_y_t, transform_y_p):
# If there are failures here, other, more specific tests should also fail
g_f = pd.DataFrame(data=g_4, columns=['My feature'])
target = metrics.MetricFrame({'recall': skm.recall_score},
transform_y_t(y_t),
transform_y_p(y_p),
sensitive_features=g_f)
# Check on the indices properties
assert target.control_levels is None
assert isinstance(target.sensitive_levels, list)
assert (target.sensitive_levels == ['My feature'])
# Check we have correct return types
assert isinstance(target.overall, pd.Series)
assert isinstance(target.by_group, pd.DataFrame)
# Check we have expected number of elements
assert len(target.overall) == 1
assert target.by_group.shape == (2, 1)
assert np.array_equal(target.by_group.index.names, ['My feature'])
recall_overall = skm.recall_score(y_t, y_p)
assert target.overall['recall'] == recall_overall
mask_p = (g_4 == 'pp')
mask_q = (g_4 == 'q')
recall_p = skm.recall_score(y_t[mask_p], y_p[mask_p])
recall_q = skm.recall_score(y_t[mask_q], y_p[mask_q])
assert target.by_group['recall']['pp'] == recall_p
assert target.by_group['recall']['q'] == recall_q
target_mins = target.group_min()
assert isinstance(target_mins, pd.Series)
assert len(target_mins) == 1
assert target_mins['recall'] == min(recall_p, recall_q)
target_maxes = target.group_max()
assert isinstance(target_mins, pd.Series)
assert len(target_maxes) == 1
assert target_maxes['recall'] == max(recall_p, recall_q)
@pytest.mark.parametrize("transform_y_p", conversions_for_1d)
@pytest.mark.parametrize("transform_y_t", conversions_for_1d)
def test_1m_1sf_1cf(transform_y_t, transform_y_p):
# If there are failures here, other, more specific tests should also fail
target = metrics.MetricFrame(skm.recall_score,
transform_y_t(y_t),
transform_y_p(y_p),
sensitive_features=g_2,
control_features=g_3)
# Check on the indices properties
assert isinstance(target.control_levels, list)
assert (target.control_levels == ['control_feature_0'])
assert isinstance(target.sensitive_levels, list)
assert (target.sensitive_levels == ['sensitive_feature_0'])
# Check we have correct return types
assert isinstance(target.overall, pd.Series)
assert isinstance(target.by_group, pd.Series)
mask_f = (g_2 == 'f')
mask_g = (g_2 == 'g')
mask_k = (g_3 == 'kk')
mask_m = (g_3 == 'm')
# Check we have expected number of elements
assert len(target.overall) == 2
assert len(target.by_group) == 4
recall_k = skm.recall_score(y_t[mask_k], y_p[mask_k])
recall_m = skm.recall_score(y_t[mask_m], y_p[mask_m])
assert target.overall['kk'] == recall_k
assert target.overall['m'] == recall_m
mask_k_f = np.logical_and(mask_k, mask_f)
mask_k_g = np.logical_and(mask_k, mask_g)
mask_m_f = np.logical_and(mask_m, mask_f)
mask_m_g = np.logical_and(mask_m, mask_g)
recall_k_f = skm.recall_score(y_t[mask_k_f], y_p[mask_k_f])
recall_m_f = skm.recall_score(y_t[mask_m_f], y_p[mask_m_f])
recall_k_g = skm.recall_score(y_t[mask_k_g], y_p[mask_k_g])
recall_m_g = skm.recall_score(y_t[mask_m_g], y_p[mask_m_g])
assert target.by_group[('kk', 'f')] == recall_k_f
assert target.by_group[('kk', 'g')] == recall_k_g
assert target.by_group[('m', 'f')] == recall_m_f
assert target.by_group[('m', 'g')] == recall_m_g
recall_k_arr = [recall_k_f, recall_k_g]
recall_m_arr = [recall_m_f, recall_m_g]
target_mins = target.group_min()
assert isinstance(target_mins, pd.Series)
assert len(target_mins) == 2
assert target_mins['kk'] == min(recall_k_arr)
assert target_mins['m'] == min(recall_m_arr)
target_maxs = target.group_max()
assert isinstance(target_mins, pd.Series)
assert len(target_maxs) == 2
assert target_maxs['kk'] == max(recall_k_arr)
assert target_maxs['m'] == max(recall_m_arr)
@pytest.mark.parametrize("transform_y_p", conversions_for_1d)
@pytest.mark.parametrize("transform_y_t", conversions_for_1d)
def test_1m_1sf_1cf_metric_dict(transform_y_t, transform_y_p):
# If there are failures here, other, more specific tests should also fail
target = metrics.MetricFrame({'recall': skm.recall_score},
transform_y_t(y_t),
transform_y_p(y_p),
sensitive_features=g_2,
control_features=g_3)
# Check on the indices properties
assert isinstance(target.control_levels, list)
assert (target.control_levels == ['control_feature_0'])
assert isinstance(target.sensitive_levels, list)
assert (target.sensitive_levels == ['sensitive_feature_0'])
# Check we have correct return types
assert isinstance(target.overall, pd.DataFrame)
assert isinstance(target.by_group, pd.DataFrame)
mask_f = (g_2 == 'f')
mask_g = (g_2 == 'g')
mask_k = (g_3 == 'kk')
mask_m = (g_3 == 'm')
# Check we have expected number of elements
assert target.overall.shape == (2, 1)
assert target.by_group.shape == (4, 1)
recall_k = skm.recall_score(y_t[mask_k], y_p[mask_k])
recall_m = skm.recall_score(y_t[mask_m], y_p[mask_m])
assert target.overall['recall']['kk'] == recall_k
assert target.overall['recall']['m'] == recall_m
mask_k_f = np.logical_and(mask_k, mask_f)
mask_k_g = np.logical_and(mask_k, mask_g)
mask_m_f = np.logical_and(mask_m, mask_f)
mask_m_g = np.logical_and(mask_m, mask_g)
recall_k_f = skm.recall_score(y_t[mask_k_f], y_p[mask_k_f])
recall_m_f = skm.recall_score(y_t[mask_m_f], y_p[mask_m_f])
recall_k_g = skm.recall_score(y_t[mask_k_g], y_p[mask_k_g])
recall_m_g = skm.recall_score(y_t[mask_m_g], y_p[mask_m_g])
assert target.by_group['recall'][('kk', 'f')] == recall_k_f
assert target.by_group['recall'][('kk', 'g')] == recall_k_g
assert target.by_group['recall'][('m', 'f')] == recall_m_f
assert target.by_group['recall'][('m', 'g')] == recall_m_g
recall_k_arr = [recall_k_f, recall_k_g]
recall_m_arr = [recall_m_f, recall_m_g]
target_mins = target.group_min()
assert isinstance(target_mins, pd.DataFrame)
assert target_mins.shape == (2, 1)
assert target_mins['recall']['kk'] == min(recall_k_arr)
assert target_mins['recall']['m'] == min(recall_m_arr)
target_maxs = target.group_max()
assert isinstance(target_mins, pd.DataFrame)
assert target_maxs.shape == (2, 1)
assert target_maxs['recall']['kk'] == max(recall_k_arr)
assert target_maxs['recall']['m'] == max(recall_m_arr)
def test_duplicate_sf_names():
groups = pd.DataFrame(np.stack([g_2, g_3], axis=1), columns=["A", "A"])
msg = "Detected duplicate feature name: 'A'"
with pytest.raises(ValueError) as execInfo:
_ = metrics.MetricFrame(skm.recall_score,
y_t,
y_p,
sensitive_features=groups)
assert execInfo.value.args[0] == msg
def test_duplicate_cf_names():
groups = pd.DataFrame(np.stack([g_2, g_3], axis=1), columns=["B", "B"])
msg = "Detected duplicate feature name: 'B'"
with pytest.raises(ValueError) as execInfo:
_ = metrics.MetricFrame(skm.recall_score,
y_t,
y_p,
sensitive_features=g_4,
control_features=groups)
assert execInfo.value.args[0] == msg
def test_duplicate_cf_sf_names():
cf = pd.DataFrame(np.stack([g_2, g_3], axis=1), columns=["A", "B"])
sf = {"B": g_1, "C": g_4}
msg = "Detected duplicate feature name: 'B'"
with pytest.raises(ValueError) as execInfo:
_ = metrics.MetricFrame(skm.recall_score,
y_t,
y_p,
sensitive_features=sf,
control_features=cf)
assert execInfo.value.args[0] == msg
def test_single_element_lists():
mf = metrics.MetricFrame(skm.balanced_accuracy_score,
[1], [1], sensitive_features=[0])
assert mf.overall == 1
| 39.457249
| 77
| 0.653005
| 1,579
| 10,614
| 4.065864
| 0.088664
| 0.080374
| 0.054517
| 0.049065
| 0.912461
| 0.884735
| 0.864486
| 0.836137
| 0.802492
| 0.774766
| 0
| 0.009525
| 0.228472
| 10,614
| 268
| 78
| 39.604478
| 0.774454
| 0.077162
| 0
| 0.631313
| 0
| 0
| 0.052578
| 0
| 0
| 0
| 0
| 0
| 0.39899
| 1
| 0.040404
| false
| 0
| 0.035354
| 0
| 0.075758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1e34f9464291454031f96b644776087a1d2d2dea
| 199
|
py
|
Python
|
students/d33101/wangyixin/Lr2/homework/admin.py
|
losepower/ITMO_ICT_WebDevelopment_2021-2022_D3310
|
4db490a8f14bcad4555b8a50ca8db1674ed87b76
|
[
"MIT"
] | null | null | null |
students/d33101/wangyixin/Lr2/homework/admin.py
|
losepower/ITMO_ICT_WebDevelopment_2021-2022_D3310
|
4db490a8f14bcad4555b8a50ca8db1674ed87b76
|
[
"MIT"
] | null | null | null |
students/d33101/wangyixin/Lr2/homework/admin.py
|
losepower/ITMO_ICT_WebDevelopment_2021-2022_D3310
|
4db490a8f14bcad4555b8a50ca8db1674ed87b76
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from . import models
# Register your models here.
admin.site.register(models.Student)
admin.site.register(models.Homework)
admin.site.register(models.Studenttopic)
| 22.111111
| 40
| 0.81407
| 27
| 199
| 6
| 0.481481
| 0.166667
| 0.314815
| 0.425926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090452
| 199
| 8
| 41
| 24.875
| 0.895028
| 0.130653
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
1e4eba328311961f443abd8c56d944d814a9b48b
| 7,305
|
py
|
Python
|
src/encoded/tests/test_reports_search.py
|
procha2/encoded
|
e9f122362b71f3b8641023b8d2d5ad531d3484b7
|
[
"MIT"
] | 102
|
2015-05-20T01:17:43.000Z
|
2022-03-07T06:03:55.000Z
|
src/encoded/tests/test_reports_search.py
|
procha2/encoded
|
e9f122362b71f3b8641023b8d2d5ad531d3484b7
|
[
"MIT"
] | 901
|
2015-01-07T23:11:57.000Z
|
2022-03-18T13:56:12.000Z
|
src/encoded/tests/test_reports_search.py
|
procha2/encoded
|
e9f122362b71f3b8641023b8d2d5ad531d3484b7
|
[
"MIT"
] | 65
|
2015-02-06T23:00:26.000Z
|
2022-01-22T07:58:44.000Z
|
import pytest
from encoded.tests.features.conftest import app, app_settings, index_workbook
pytestmark = [
pytest.mark.indexing,
pytest.mark.usefixtures('index_workbook'),
]
def test_reports_search_batched_search_generator_init(dummy_request):
from encoded.reports.search import BatchedSearchGenerator
dummy_request.environ['QUERY_STRING'] = (
'type=Experiment'
)
bsg = BatchedSearchGenerator(dummy_request)
assert isinstance(bsg, BatchedSearchGenerator)
assert bsg.batch_field == '@id'
assert bsg.batch_size == 5000
assert bsg.param_list == {'type': ['Experiment']}
assert bsg.batch_param_values == []
def test_reports_search_batched_search_generator_make_batched_values_from_batch_param_values(dummy_request):
from encoded.reports.search import BatchedSearchGenerator
dummy_request.environ['QUERY_STRING'] = (
'type=Experiment'
)
bsg = BatchedSearchGenerator(dummy_request)
assert list(bsg._make_batched_values_from_batch_param_values()) == []
from encoded.reports.metadata import BatchedSearchGenerator
dummy_request.environ['QUERY_STRING'] = (
'type=Experiment&@id=/files/ENCFFABC123/'
'&@id=/files/ENCFFABC345/&@id=/files/ENCFFABC567/'
'&@id=/files/ENCFFABC789/&@id=/files/ENCFFDEF123/'
'&@id=/files/ENCFFDEF345/&@id=/files/ENCFFDEF567/'
)
bsg = BatchedSearchGenerator(dummy_request, batch_size=2)
assert list(bsg._make_batched_values_from_batch_param_values()) == [
['/files/ENCFFABC123/', '/files/ENCFFABC345/'],
['/files/ENCFFABC567/', '/files/ENCFFABC789/'],
['/files/ENCFFDEF123/', '/files/ENCFFDEF345/'],
['/files/ENCFFDEF567/']
]
bsg = BatchedSearchGenerator(dummy_request, batch_field='accession', batch_size=2)
assert list(bsg._make_batched_values_from_batch_param_values()) == []
dummy_request.environ['QUERY_STRING'] = (
'type=Experiment&@id=/files/ENCFFABC123/'
'&@id=/files/ENCFFABC345/&@id=/files/ENCFFABC567/'
'&@id=/files/ENCFFABC789/&@id=/files/ENCFFDEF123/'
'&@id=/files/ENCFFDEF345/&@id=/files/ENCFFDEF567/'
'&accession=ENCFFAAA111'
)
bsg = BatchedSearchGenerator(dummy_request, batch_field='accession')
assert next(bsg._make_batched_values_from_batch_param_values()) == ['ENCFFAAA111']
def test_reports_search_batched_search_generator_make_batched_params_from_batched_values(dummy_request):
from encoded.reports.search import BatchedSearchGenerator
dummy_request.environ['QUERY_STRING'] = (
'type=Experiment&@id=/files/ENCFFABC123/'
'&@id=/files/ENCFFABC345/&@id=/files/ENCFFABC567/'
'&@id=/files/ENCFFABC789/&@id=/files/ENCFFDEF123/'
'&@id=/files/ENCFFDEF345/&@id=/files/ENCFFDEF567/'
)
bsg = BatchedSearchGenerator(dummy_request, batch_size=2)
actual_batched_params = []
for batched_values in bsg._make_batched_values_from_batch_param_values():
actual_batched_params.append(
bsg._make_batched_params_from_batched_values(batched_values)
)
expected_batched_params = [
[('@id', '/files/ENCFFABC123/'), ('@id', '/files/ENCFFABC345/')],
[('@id', '/files/ENCFFABC567/'), ('@id', '/files/ENCFFABC789/')],
[('@id', '/files/ENCFFDEF123/'), ('@id', '/files/ENCFFDEF345/')],
[('@id', '/files/ENCFFDEF567/')]
]
assert expected_batched_params == actual_batched_params
def test_reports_search_batched_search_generator_build_new_request(dummy_request):
from encoded.reports.search import BatchedSearchGenerator
dummy_request.environ['QUERY_STRING'] = (
'type=Experiment&@id=/files/ENCFFABC123/'
'&@id=/files/ENCFFABC345/&@id=/files/ENCFFABC567/'
'&@id=/files/ENCFFABC789/&@id=/files/ENCFFDEF123/'
'&@id=/files/ENCFFDEF345/&@id=/files/ENCFFDEF567/'
)
bsg = BatchedSearchGenerator(dummy_request, batch_size=2)
batched_params = [('@id', '/files/ENCFFABC123/'), ('@id', '/files/ENCFFABC345/')]
request = bsg._build_new_request(batched_params)
assert str(request.query_string) == (
'type=Experiment'
'&%40id=%2Ffiles%2FENCFFABC123%2F'
'&%40id=%2Ffiles%2FENCFFABC345%2F'
'&limit=all'
)
assert request.path_info == '/search/'
assert request.registry
dummy_request.environ['QUERY_STRING'] = (
'type=Experiment&@id=/files/ENCFFABC123/'
'&@id=/files/ENCFFABC345/&@id=/files/ENCFFABC567/'
'&@id=/files/ENCFFABC789/&@id=/files/ENCFFDEF123/'
'&@id=/files/ENCFFDEF345/&@id=/files/ENCFFDEF567/'
'&field=accession&files.status=released'
)
bsg = BatchedSearchGenerator(dummy_request, batch_size=2)
batched_params = [('@id', '/files/ENCFFABC123/'), ('@id', '/files/ENCFFABC345/')]
request = bsg._build_new_request(batched_params)
assert request.query_string == (
'type=Experiment&field=accession&files.status=released'
'&%40id=%2Ffiles%2FENCFFABC123%2F'
'&%40id=%2Ffiles%2FENCFFABC345%2F'
'&limit=all'
)
assert request.path_info == '/search/'
assert request.registry
def test_reports_search_batched_search_generator_results(index_workbook, dummy_request):
from encoded.reports.search import BatchedSearchGenerator
dummy_request.environ['QUERY_STRING'] = (
'type=Experiment&field=@id&field=status'
)
bsg = BatchedSearchGenerator(dummy_request)
results = list(bsg.results())
assert len(results) >= 63, f'{len(results)} not expected'
dummy_request.environ['QUERY_STRING'] = (
'type=Experiment&@id=/experiments/ENCSR001ADI/'
'&field=@id&field=status'
)
bsg = BatchedSearchGenerator(dummy_request)
results = list(bsg.results())
assert len(results) == 1
dummy_request.environ['QUERY_STRING'] = (
'type=Experiment'
'&@id=/experiments/ENCSR001ADI/'
'&@id=/experiments/ENCSR003CON/'
'&@id=/experiments/ENCSR000ACY/'
'&@id=/experiments/ENCSR001CON/'
'&@id=/experiments/ENCSR751STT/'
'&@id=/experiments/ENCSR604DNT/'
'&@id=/experiments/ENCSR001SER/'
'&@id=/experiments/ENCSR000AEM/'
'&@id=/experiments/ENCSR334EJI/'
'&@id=/experiments/ENCSR123AAD/'
'&field=@id&field=status'
)
bsg = BatchedSearchGenerator(dummy_request)
results = list(bsg.results())
assert len(results) == 10
for result in results:
# (@type, @id, status)
assert len(result.keys()) == 3
bsg = BatchedSearchGenerator(dummy_request, batch_size=2)
results = list(bsg.results())
assert len(results) == 10
for result in results:
assert len(result.keys()) == 3
bsg = BatchedSearchGenerator(dummy_request, batch_size=3)
results = list(bsg.results())
assert len(results) == 10
for result in results:
assert len(result.keys()) == 3
bsg = BatchedSearchGenerator(dummy_request, batch_size=5)
results = list(bsg.results())
assert len(results) == 10
for result in results:
assert len(result.keys()) == 3
bsg = BatchedSearchGenerator(dummy_request, batch_field='accession')
results = list(bsg.results())
assert len(results) == 10
for result in results:
assert len(result.keys()) == 3
| 41.505682
| 108
| 0.67666
| 775
| 7,305
| 6.15871
| 0.126452
| 0.067463
| 0.149591
| 0.116279
| 0.818563
| 0.791326
| 0.780641
| 0.724282
| 0.704798
| 0.678399
| 0
| 0.043218
| 0.176454
| 7,305
| 175
| 109
| 41.742857
| 0.750166
| 0.002738
| 0
| 0.565217
| 0
| 0
| 0.311959
| 0.21763
| 0
| 0
| 0
| 0
| 0.173913
| 1
| 0.031056
| false
| 0
| 0.049689
| 0
| 0.080745
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1e655231080235ee0726408df48388860365c1d1
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/ptyprocess/ptyprocess.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/ptyprocess/ptyprocess.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/ptyprocess/ptyprocess.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/b2/4d/ac/536236d98ca5d60537163166a562f7078de8d0aa86ddddc223caf436af
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.40625
| 0
| 96
| 1
| 96
| 96
| 0.489583
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1e80eaa3d2d41e85eb0f375afcda04cea23784ce
| 18,961
|
py
|
Python
|
master/pyext/src/validation/sas_plots.py
|
salilab/IHMValidation
|
ddf1a080a4b7f66c2f067312f5f4a5c6584848d1
|
[
"MIT"
] | null | null | null |
master/pyext/src/validation/sas_plots.py
|
salilab/IHMValidation
|
ddf1a080a4b7f66c2f067312f5f4a5c6584848d1
|
[
"MIT"
] | 23
|
2020-12-09T22:27:29.000Z
|
2022-03-30T18:01:43.000Z
|
master/pyext/src/validation/sas_plots.py
|
salilab/IHMValidation
|
ddf1a080a4b7f66c2f067312f5f4a5c6584848d1
|
[
"MIT"
] | 1
|
2022-03-21T22:55:24.000Z
|
2022-03-21T22:55:24.000Z
|
###################################
# Script :
# 1) Contains class to generate SAS
# plots
# 2) Inherits from SAS class
#
# ganesans - Salilab - UCSF
# ganesans@salilab.org
###################################
import pandas as pd
import glob
import sys,os,math
import numpy as np
import pandas as pd
from validation import sas, get_input_information
from bokeh.io import output_file, show, curdoc, export_png, export_svgs
from bokeh.models import Span,ColumnDataSource, LinearAxis, Legend
from bokeh.palettes import GnBu3, RdBu,OrRd3,Blues,YlOrBr, Spectral6, Set1
from bokeh.plotting import figure, output_file, save
from bokeh.models.widgets import Tabs, Panel
from bokeh.layouts import row,column
import multiprocessing as mp
class sas_validation_plots(sas.sas_validation):
    """Generate SAS validation plots as interactive HTML plus static SVG.

    Inherits from ``sas.sas_validation``; every dataframe/score needed by the
    individual plotting methods is pre-computed once in ``__init__``.  All
    plots share the same 500x500 bokeh figure styling, which is factored into
    the private helpers ``_style``, ``_zero_line`` and ``_save_plots``.
    """

    def __init__(self, mmcif_file):
        """Collect all SAS dataframes/scores for the given mmCIF file."""
        super().__init__(mmcif_file)
        # Entry ID, used as a prefix for every output file name.
        self.ID = str(get_input_information.get_id(self))
        # SASBDB-code -> dataframe mappings produced by the parent class.
        self.df_dict = sas.sas_validation.modify_intensity(self)
        self.pdf_dict = sas.sas_validation.get_pddf(self)
        self.fdf_dict = sas.sas_validation.get_fit_data(self)
        self.pdf_ext_dict = sas.sas_validation.get_pofr_ext(self)
        self.pdf_dict_err = sas.sas_validation.get_pofr_errors(self)
        self.score, self.gdf = sas.sas_validation.get_Guinier_data(self)
        # Output directory for all HTML/SVG files (single-arg os.path.join is
        # a no-op kept for compatibility; extra slashes are harmless to the OS).
        self.filename = os.path.join('../static/images//')

    # ----- private helpers (shared plumbing) -----

    def _out_path(self, basename: str) -> str:
        """Full output path for a plot file inside the images directory."""
        return os.path.join(self.filename, basename)

    def _style(self, p, xlabel: str, ylabel: str) -> None:
        """Apply the axis-label/title styling shared by every plot."""
        p.xaxis.major_label_text_font_size = "14pt"
        p.yaxis.major_label_text_font_size = "14pt"
        p.title.text_font_size = '12pt'
        p.title.align = "center"
        p.title.vertical_align = 'top'
        p.xaxis.axis_label = xlabel
        p.xaxis.axis_label_text_font_size = '14pt'
        p.yaxis.axis_label = ylabel
        p.yaxis.axis_label_text_font_size = '14pt'

    def _zero_line(self, p) -> None:
        """Add the red horizontal y=0 reference line used by residual plots."""
        p.renderers.extend([Span(location=0, dimension='width',
                                 line_color='red', line_width=3)])

    def _save_plots(self, p, basename: str, sized_svg: bool = False) -> None:
        """Save ``<basename>.html`` then export ``<basename>.svg``.

        ``sized_svg`` keeps the explicit 500x500 SVG export used by the
        intensity plots; other plots are exported without an explicit size,
        matching the original behaviour.
        """
        save(p, filename=self._out_path(basename + ".html"))
        p.output_backend = "svg"
        if sized_svg:
            export_svgs(p, height=500, width=500,
                        filename=self._out_path(basename + ".svg"))
        else:
            export_svgs(p, filename=self._out_path(basename + ".svg"))

    # ----- individual plots -----

    def plot_intensities(self, sasbdb: str, df: pd.DataFrame):
        '''
        plot intensities with errors
        '''
        output_file(self.ID+sasbdb+"intensities.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Log I(q) vs q with error bars ("+sasbdb+")")
        p.circle(x='Q', y='logI', source=source, color='blue',
                 fill_alpha=0.3, size=5)
        # Pre-computed error-bar segments from the dataframe.
        p.multi_line('err_x', 'err_y', source=source, color='gray',
                     line_width=0.5)
        self._style(p, "q [nm\u207B\u00B9]", 'Log I(q) [a.u]')
        # Debug print statements removed.
        self._save_plots(p, self.ID+sasbdb+"intensities", sized_svg=True)

    def plot_intensities_log(self, sasbdb: str, df: pd.DataFrame):
        '''
        plot intensities on a log scale with errors
        '''
        output_file(self.ID+sasbdb+"intensities_log.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Log I(q) vs Log q with error bars ("+sasbdb+")")
        p.circle(x='logQ', y='logI', source=source, color='blue',
                 fill_alpha=0.3, size=5)
        p.multi_line('logX', 'err_y', source=source, color='gray',
                     line_width=0.5)
        self._style(p, 'Log q [nm\u207B\u00B9]', 'Log I(q) [a.u]')
        self._save_plots(p, self.ID+sasbdb+"intensities_log", sized_svg=True)

    def plot_kratky_dep(self, sasbdb: str, df: pd.DataFrame):
        '''
        plot kratky plot, deprecated function
        '''
        output_file(self.ID+sasbdb+"Kratky_dep.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Kratky plot ("+sasbdb+")")
        p.circle(x='Q', y='Ky', source=source, color='blue',
                 fill_alpha=0.3, size=5)
        self._style(p, 'Log q [nm\u207B\u00B9]', 'q\u00B2 I(q)')
        self._save_plots(p, self.ID+sasbdb+"Kratky_dep")

    def plot_kratky(self, sasbdb: str, df: pd.DataFrame):
        '''
        plot dimensionless kratky
        '''
        output_file(self.ID+sasbdb+"Kratky.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Dimensionless Kratky plot ("+sasbdb+")")
        p.circle(x='Kx', y='Ky', source=source, color='blue',
                 fill_alpha=0.3, size=5)
        self._style(p, 'qRg', 'q\u00B2 Rg\u00B2 I(q)/I(0)')
        self._save_plots(p, self.ID+sasbdb+"Kratky")

    def plot_porod_debye(self, sasbdb: str, df: pd.DataFrame):
        '''
        porod debye plot for flexibility
        '''
        output_file(self.ID+sasbdb+"porod.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Porod-Debye plot ("+sasbdb+")")
        p.circle(x='Px', y='Py', source=source, color='blue',
                 fill_alpha=0.3, size=5)
        self._style(p, 'q \u2074', 'q\u2074 I(q)')
        self._save_plots(p, self.ID+sasbdb+"porod")

    def plot_pddf(self, sasbdb: str, df: pd.DataFrame):
        '''
        p(r) plot, deprecated function
        '''
        output_file(self.ID+sasbdb+"pddf.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Pair distance distribution function ("+sasbdb+")")
        p.circle(x='R', y='P', source=source, color='blue',
                 fill_alpha=0.3, size=5)
        p.multi_line('err_x', 'err_y', source=source, color='gray',
                     line_width=1.5)
        self._style(p, "r [nm]", 'P(r)')
        self._save_plots(p, self.ID+sasbdb+"pddf")

    def plot_pddf_residuals(self, sasbdb: str, df: pd.DataFrame):
        '''
        p(r) residuals
        '''
        output_file(self.ID+sasbdb+"pddf_residuals.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Residuals for P(r) fit ("+sasbdb+")")
        p.circle(x='Q', y='R', source=source, color='blue',
                 fill_alpha=0.3, size=5)
        self._zero_line(p)
        self._style(p, "q\u00B2 [nm \u00B2]", 'R')
        self._save_plots(p, self.ID+sasbdb+"pddf_residuals")

    def plot_pddf_residuals_wt(self, sasbdb: str, df: pd.DataFrame):
        '''
        p(r) error weighted residuals
        '''
        output_file(self.ID+sasbdb+"pddf_residuals_wt.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Error weighted residuals for P(r) fit ("+sasbdb+")")
        p.circle(x='Q', y='WR', source=source, color='blue',
                 fill_alpha=0.3, size=5)
        self._zero_line(p)
        self._style(p, "q\u00B2 [nm \u00B2]", 'R/\u03C3')
        self._save_plots(p, self.ID+sasbdb+"pddf_residuals_wt")

    def plot_pddf_int(self, sasbdb: str, df_int: pd.DataFrame,
                      df_pofr: pd.DataFrame):
        '''
        p(r) with fit
        '''
        output_file(self.ID+sasbdb+"pddf_int.html", mode="inline")
        source1 = ColumnDataSource(df_int)
        source2 = ColumnDataSource(df_pofr)
        p = figure(plot_height=500, plot_width=500,
                   title="P(r) extrapolated fit for "+sasbdb)
        p.circle(x='Q', y='logI', source=source1, color='blue', line_width=1,
                 fill_alpha=0.3, size=3, legend_label='Experimental data')
        p.line(x='Q', y='logI', source=source2, color='red', line_width=3,
               legend_label="Linear fit")
        p.legend.orientation = "vertical"
        p.legend.location = "top_right"
        # Fixed label: the original doubled the superscript-minus
        # ("\u207B\u207B\u00B9"), rendering as a typo.
        self._style(p, "q [\u212B\u207B\u00B9]", 'Log I(q) [a.u]')
        self._save_plots(p, self.ID+sasbdb+"pddf_int")

    def Guinier_plot_fit(self, sasbdb: str, df: pd.DataFrame, score: int):
        '''
        Gunier plot with fit
        '''
        output_file(self.ID+sasbdb+"guinier.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Guinier plot for "+sasbdb+" (R\u00B2="+str(score)+")")
        p.circle(x='Q2A', y='logI', source=source, color='blue', line_width=1,
                 fill_alpha=0.3, size=5, legend_label='Experimental data')
        p.line(x='Q2A', y='y_pred', source=source, color='red', line_width=3,
               legend_label="Linear fit")
        p.legend.orientation = "vertical"
        p.legend.location = "top_right"
        self._style(p, "q\u00B2 [nm \u00B2]", 'Log I(q)')
        self._save_plots(p, self.ID+sasbdb+"guinier")

    def Guinier_plot_residuals(self, sasbdb: str, df: pd.DataFrame):
        '''
        Guinier plot residuals
        '''
        output_file(self.ID+sasbdb+"guinier_residuals.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Residuals for Guinier plot fit ("+sasbdb+")")
        p.circle(x='Q2A', y='res', source=source, color='blue',
                 fill_alpha=0.3, size=5)
        self._zero_line(p)
        self._style(p, "q\u00B2 [nm \u00B2]", 'R')
        self._save_plots(p, self.ID+sasbdb+"guinier_residuals")

    def plot_fit(self, sasbdb: str, fit: int, score: int, df: pd.DataFrame):
        '''
        plot chi-squared fit
        '''
        output_file(self.ID+sasbdb+str(fit)+"fit1.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Model fit for "+sasbdb)
        p.circle(x='Q', y='logIe', source=source, color='blue', line_width=1,
                 fill_alpha=0.3, size=3, legend_label='Experimental data')
        p.line(x='Q', y='logIb', source=source, color='red', line_width=3,
               legend_label="Linear fit")
        p.legend.orientation = "vertical"
        p.legend.location = "top_right"
        # Fixed doubled superscript-minus typo in the label.
        self._style(p, "q [\u212B\u207B\u00B9]", 'Log I(q)')
        self._save_plots(p, self.ID+sasbdb+str(fit)+"fit1")

    def plot_fit_residuals(self, sasbdb: str, fit: int, df: pd.DataFrame):
        '''
        plot residuals for each fit
        '''
        output_file(self.ID+sasbdb+str(fit)+"residuals.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Residuals for model fit ("+sasbdb+")")
        p.circle(x='Q', y='r', source=source, color='blue',
                 fill_alpha=0.3, size=5)
        self._zero_line(p)
        self._style(p, "q [\u212B\u207B\u00B9]", 'R')
        self._save_plots(p, self.ID+sasbdb+str(fit)+"residuals")

    def plot_fit_residuals_wt(self, sasbdb: str, fit: int, df: pd.DataFrame):
        '''
        plot error weighted residuals for each fit
        '''
        output_file(self.ID+sasbdb+str(fit)+"residuals_wt.html", mode="inline")
        source = ColumnDataSource(df)
        p = figure(plot_height=500, plot_width=500,
                   title="Error-weighted residuals for model fit ("+sasbdb+")")
        p.circle(x='Q', y='rsigma', source=source, color='blue',
                 fill_alpha=0.3, size=5)
        self._zero_line(p)
        self._style(p, "q [\u212B\u207B\u00B9]", 'R/\u03C3')
        self._save_plots(p, self.ID+sasbdb+str(fit)+"residuals_wt")

    # ----- drivers over all datasets -----

    def plot_multiple(self):
        """Intensity/Kratky/Porod/P(r)-extrapolation plots per dataset."""
        for key, val in self.df_dict.items():
            self.plot_intensities(key, val)
            self.plot_intensities_log(key, val)
            self.plot_kratky(key, val)
            self.plot_porod_debye(key, val)
            self.plot_pddf_int(key, val, self.pdf_ext_dict[key])

    def plot_Guinier(self):
        """Guinier fit and residual plots per dataset."""
        for key, val in self.gdf.items():
            self.Guinier_plot_fit(key, val, self.score[key])
            self.Guinier_plot_residuals(key, val)

    def plot_pf(self):
        """P(r) plots plus (error-weighted) residuals per dataset."""
        for key, val in self.pdf_dict.items():
            self.plot_pddf(key, val)
            self.plot_pddf_residuals(key, self.pdf_dict_err[key])
            self.plot_pddf_residuals_wt(key, self.pdf_dict_err[key])

    def plot_fits(self):
        """Model-fit plots for every (dataset, fit) pair with fit data."""
        for key, val in self.fdf_dict.items():
            for key_m, val_m in val.items():
                # val_m[0] is the fit score, val_m[1] the fit dataframe.
                if not val_m[1].empty:
                    self.plot_fit(key, key_m, val_m[0], val_m[1])
                    self.plot_fit_residuals(key, key_m, val_m[1])
                    self.plot_fit_residuals_wt(key, key_m, val_m[1])
| 48.246819
| 118
| 0.650546
| 2,798
| 18,961
| 4.203717
| 0.079342
| 0.047611
| 0.071416
| 0.080939
| 0.835827
| 0.799609
| 0.7588
| 0.736184
| 0.699881
| 0.684407
| 0
| 0.033257
| 0.194399
| 18,961
| 392
| 119
| 48.369898
| 0.736759
| 0.055008
| 0
| 0.530351
| 1
| 0
| 0.13526
| 0.013366
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060703
| false
| 0
| 0.041534
| 0
| 0.105431
| 0.00639
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1e851cfc7153f817164231edd71881bf85687b0f
| 112
|
py
|
Python
|
pyforms/terminal/Controls/ControlText.py
|
dominic-dev/pyformsd
|
23e31ceff2943bc0f7286d25dd14450a14b986af
|
[
"MIT"
] | null | null | null |
pyforms/terminal/Controls/ControlText.py
|
dominic-dev/pyformsd
|
23e31ceff2943bc0f7286d25dd14450a14b986af
|
[
"MIT"
] | null | null | null |
pyforms/terminal/Controls/ControlText.py
|
dominic-dev/pyformsd
|
23e31ceff2943bc0f7286d25dd14450a14b986af
|
[
"MIT"
] | null | null | null |
from pyforms.terminal.Controls.ControlBase import ControlBase
class ControlText(ControlBase):
    """Terminal placeholder for the text control; inherits everything from ControlBase."""
| 22.4
| 61
| 0.767857
| 11
| 112
| 7.818182
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178571
| 112
| 5
| 62
| 22.4
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
1e8637e33980200d741517bee56227d9422268c6
| 3,270
|
py
|
Python
|
matchbook/tests/test_referencedata.py
|
jackrhunt13/matchbook
|
a12ac26e272ddc004f2590b4f4ad8f4715f1df66
|
[
"MIT"
] | 11
|
2017-07-11T10:08:19.000Z
|
2021-01-22T17:08:44.000Z
|
matchbook/tests/test_referencedata.py
|
oddoneuk/matchbook
|
eb37817c4f6604097be406edf2df7f711586dcf6
|
[
"MIT"
] | 10
|
2017-07-14T23:43:25.000Z
|
2021-08-19T17:21:10.000Z
|
matchbook/tests/test_referencedata.py
|
oddoneuk/matchbook
|
eb37817c4f6604097be406edf2df7f711586dcf6
|
[
"MIT"
] | 9
|
2017-12-13T13:25:42.000Z
|
2021-07-16T18:24:23.000Z
|
import unittest
import unittest.mock as mock
from matchbook.apiclient import APIClient
from matchbook.endpoints.referencedata import ReferenceData
class ReferenceDataTest(unittest.TestCase):
    """Unit tests for the ReferenceData endpoint wrapper.

    Each test patches ``request`` and ``process_response`` on the endpoint
    class, invokes one lookup method, and verifies the request URL/params.
    """

    def setUp(self):
        self.client = APIClient('username', 'password')
        self.reference_data = ReferenceData(self.client)

    @mock.patch('matchbook.endpoints.referencedata.ReferenceData.process_response')
    @mock.patch('matchbook.endpoints.referencedata.ReferenceData.request', return_value=mock.Mock())
    def test_get_currencies(self, req_mock, resp_mock):
        """Currencies lookup hits the main URN with the expected path."""
        self.reference_data.get_currencies()
        req_mock.assert_called_once_with(
            "GET", self.client.urn_main, 'lookups/currencies', session=None,)
        self.assertEqual(resp_mock.call_count, 1)

    @mock.patch('matchbook.endpoints.referencedata.ReferenceData.process_response')
    @mock.patch('matchbook.endpoints.referencedata.ReferenceData.request', return_value=mock.Mock())
    def test_get_sports(self, req_mock, resp_mock):
        """Sports lookup hits the edge URN with ordering/pagination params."""
        self.reference_data.get_sports()
        req_mock.assert_called_once_with(
            "GET", self.client.urn_edge, 'lookups/sports',
            params={'order': 'name asc', 'per-page': 500}, session=None,)
        self.assertEqual(resp_mock.call_count, 1)

    @mock.patch('matchbook.endpoints.referencedata.ReferenceData.process_response')
    @mock.patch('matchbook.endpoints.referencedata.ReferenceData.request', return_value=mock.Mock())
    def test_get_oddstype(self, req_mock, resp_mock):
        """Odds-types lookup hits the main URN with the expected path."""
        self.reference_data.get_oddstype()
        req_mock.assert_called_once_with(
            "GET", self.client.urn_main, 'lookups/odds-types', session=None,)
        self.assertEqual(resp_mock.call_count, 1)

    @mock.patch('matchbook.endpoints.referencedata.ReferenceData.process_response')
    @mock.patch('matchbook.endpoints.referencedata.ReferenceData.request', return_value=mock.Mock())
    def test_get_countries(self, req_mock, resp_mock):
        """Countries lookup hits the main URN with the expected path."""
        self.reference_data.get_countries()
        req_mock.assert_called_once_with(
            "GET", self.client.urn_main, 'lookups/countries', session=None,)
        self.assertEqual(resp_mock.call_count, 1)

    @mock.patch('matchbook.endpoints.referencedata.ReferenceData.process_response')
    @mock.patch('matchbook.endpoints.referencedata.ReferenceData.request', return_value=mock.Mock())
    def test_get_regions(self, req_mock, resp_mock):
        """Regions lookup embeds the country id in the request path."""
        self.reference_data.get_regions(country_id=1)
        req_mock.assert_called_once_with(
            "GET", self.client.urn_main, 'lookups/regions/1', session=None,)
        self.assertEqual(resp_mock.call_count, 1)

    @mock.patch('matchbook.endpoints.referencedata.ReferenceData.process_response')
    @mock.patch('matchbook.endpoints.referencedata.ReferenceData.request', return_value=mock.Mock())
    def test_get_navigation(self, req_mock, resp_mock):
        """Navigation lookup hits the edge URN with pagination params."""
        self.reference_data.get_navigation()
        req_mock.assert_called_once_with(
            "GET", self.client.urn_edge, 'navigation',
            params={'offset': 0, 'per-page': 500}, session=None,
        )
        self.assertEqual(resp_mock.call_count, 1)
| 50.307692
| 110
| 0.750459
| 389
| 3,270
| 6.041131
| 0.154242
| 0.114894
| 0.171489
| 0.137872
| 0.806809
| 0.806809
| 0.806809
| 0.806809
| 0.806809
| 0.806809
| 0
| 0.005321
| 0.13792
| 3,270
| 64
| 111
| 51.09375
| 0.828308
| 0
| 0
| 0.382979
| 0
| 0
| 0.268278
| 0.218415
| 0
| 0
| 0
| 0
| 0.255319
| 1
| 0.148936
| false
| 0.021277
| 0.085106
| 0
| 0.255319
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1e8f489835cfb3d4fe8abf4f35f2a2bad935cef2
| 8,384
|
py
|
Python
|
bulk_renamer/commands.py
|
jfilipedias/bulk-renamer
|
ba3bde289b8383a0ac94baec186afa0e4a06b983
|
[
"MIT"
] | null | null | null |
bulk_renamer/commands.py
|
jfilipedias/bulk-renamer
|
ba3bde289b8383a0ac94baec186afa0e4a06b983
|
[
"MIT"
] | null | null | null |
bulk_renamer/commands.py
|
jfilipedias/bulk-renamer
|
ba3bde289b8383a0ac94baec186afa0e4a06b983
|
[
"MIT"
] | null | null | null |
import typer
from rich.console import Console
from bulk_renamer.functions import confirm_changes, get_cwd_file_paths, get_value_input
# Typer CLI application: each @app.command() below registers one subcommand.
app = typer.Typer()
# Rich console shared by the commands for user-facing status messages.
console = Console()
@app.command()
def alternate() -> None:
    """Alternate the name characters between upper and lowercase."""
    old_names = []
    renamed = []
    for file_path in get_cwd_file_paths():
        stem = file_path.stem
        ext = file_path.suffix
        if not stem:
            continue
        # Even positions upper, odd positions lower — same toggle as before.
        mixed = "".join(
            ch.upper() if idx % 2 == 0 else ch.lower()
            for idx, ch in enumerate(stem)
        )
        renamed.append(f"{mixed}{ext}")
        old_names.append(f"{stem}{ext}")
    confirm_changes(old_names, renamed)
@app.command()
def camel(
    whitespace: bool = typer.Option(
        False, "--whitespace", "-w", help="Maintains the filename whitespaces."
    )
) -> None:
    """Format the filename to camel case convention.

    Title-cases every word, lowers the very first character, and strips
    whitespace unless --whitespace is given.
    """
    current_file_paths = get_cwd_file_paths()
    current_filenames = []
    new_filenames = []
    for path in current_file_paths:
        name = path.stem
        extension = path.suffix
        if not name:
            continue
        # str.title() already lowercases non-leading characters, so the
        # original separate .lower() pass was redundant and is removed.
        new_name = name.title()
        new_name = new_name[:1].lower() + new_name[1:]
        if not whitespace:
            new_name = new_name.replace(" ", "")
        new_filenames.append(f"{new_name}{extension}")
        current_filenames.append(f"{name}{extension}")
    confirm_changes(current_filenames, new_filenames)
@app.command()
def lower() -> None:
    """Set the filename to lowercase."""
    old_names, renamed = [], []
    for file_path in get_cwd_file_paths():
        stem, ext = file_path.stem, file_path.suffix
        if not stem:
            continue
        renamed.append(f"{stem.lower()}{ext}")
        old_names.append(f"{stem}{ext}")
    confirm_changes(old_names, renamed)
@app.command()
def kebab(
    upper: bool = typer.Option(
        False, "--upper", "-u", help="Set all characters to uppercase."
    )
) -> None:
    """Format the filename to kebab case convention."""
    # Single translation table replaces the chained replace() calls.
    dash_map = str.maketrans(" _", "--")
    old_names, renamed = [], []
    for file_path in get_cwd_file_paths():
        stem, ext = file_path.stem, file_path.suffix
        if not stem:
            continue
        cased = stem.upper() if upper else stem.lower()
        renamed.append(f"{cased.translate(dash_map)}{ext}")
        old_names.append(f"{stem}{ext}")
    confirm_changes(old_names, renamed)
@app.command()
def pascal(
    whitespace: bool = typer.Option(
        False, "--whitespace", "-w", help="Maintains the filename whitespaces."
    )
) -> None:
    """Format the filename to pascal case convention."""
    old_names, renamed = [], []
    for file_path in get_cwd_file_paths():
        stem, ext = file_path.stem, file_path.suffix
        if not stem:
            continue
        titled = stem.title()
        if not whitespace:
            titled = titled.replace(" ", "")
        renamed.append(f"{titled}{ext}")
        old_names.append(f"{stem}{ext}")
    confirm_changes(old_names, renamed)
@app.command()
def prefix(
    value: str = typer.Option(
        "", help="The string to be added to the beginning of the filename."
    )
) -> None:
    """Adds a string to the beginning of the filename."""
    # Delegates to the shared affix helper in prefix mode.
    add_affix_to_filename(value=value, is_prefix=True)
@app.command()
def remove(
    value: str = typer.Argument(..., help="The string to remove from filename.")
) -> None:
    """Remove a specified string from the filename."""
    # Fall back to an interactive prompt when an empty string was given.
    if not value:
        value = get_value_input("What's the string you want to remove?")[0]
    old_names, renamed = [], []
    for file_path in get_cwd_file_paths():
        stem, ext = file_path.stem, file_path.suffix
        if not stem or value not in stem:
            continue
        renamed.append(f"{stem.replace(value, '')}{ext}")
        old_names.append(f"{stem}{ext}")
    if old_names:
        confirm_changes(old_names, renamed)
    else:
        console.print(f"The value [cyan]{value}[/] wasn't found.")
@app.command()
def replace(
    # Fixed user-facing help-text typo: "shearch" -> "search".
    old_value: str = typer.Option("", help="The string to search for."),
    new_value: str = typer.Option("", help="The string to replace the old value with."),
) -> None:
    """Replaces a specified string in the filename with another specified string."""
    # Prompt interactively for whichever value(s) were not given on the CLI.
    if not old_value and not new_value:
        new_value, old_value = get_value_input(
            new_value_message="What's the new value you want to put?",
            old_value_message="What's the old value you want to replace?",
        )
    elif not old_value:
        old_value = get_value_input(
            new_value_message="",
            old_value_message="What's the string you want to remove?",
        )[1]
    elif not new_value:
        new_value = get_value_input("What's the string you want to put?")[0]
    current_file_paths = get_cwd_file_paths()
    current_filenames = []
    new_filenames = []
    for path in current_file_paths:
        name = path.stem
        extension = path.suffix
        # Skip files without a stem or without a match to replace.
        if not name or old_value not in name:
            continue
        new_name = name.replace(old_value, new_value)
        new_filenames.append(f"{new_name}{extension}")
        current_filenames.append(f"{name}{extension}")
    if not current_filenames:
        console.print(f"The value [cyan]{old_value}[/] wasn't found.")
    else:
        confirm_changes(current_filenames, new_filenames)
@app.command()
def snake(
    upper: bool = typer.Option(
        False, "--upper", "-u", help="Set all characters to uppercase."
    )
) -> None:
    """Format the filename to snake case convention."""
    old_names, renamed = [], []
    for file_path in get_cwd_file_paths():
        stem, ext = file_path.stem, file_path.suffix
        if not stem:
            continue
        cased = stem.upper() if upper else stem.lower()
        renamed.append(f"{cased.replace(' ', '_')}{ext}")
        old_names.append(f"{stem}{ext}")
    confirm_changes(old_names, renamed)
@app.command()
def suffix(
    value: str = typer.Option(
        "", help="The string to be added to the ending of the filename."
    )
) -> None:
    """Adds a string to the ending of the filename."""
    # Delegates to the shared affix helper in suffix mode.
    add_affix_to_filename(value=value, is_prefix=False)
@app.command()
def upper() -> None:
    """Set the filename to uppercase."""
    old_names, renamed = [], []
    for file_path in get_cwd_file_paths():
        stem, ext = file_path.stem, file_path.suffix
        if not stem:
            continue
        renamed.append(f"{stem.upper()}{ext}")
        old_names.append(f"{stem}{ext}")
    confirm_changes(old_names, renamed)
def add_affix_to_filename(value: str = "", is_prefix: bool = True) -> None:
    """Adds a affix to a filename."""
    # Prompt for the affix when none was supplied.
    if not value:
        kind = "prefix" if is_prefix else "suffix"
        value = get_value_input(f"What's the {kind} you want to add?")[0]
    old_names, renamed = [], []
    for file_path in get_cwd_file_paths():
        stem, ext = file_path.stem, file_path.suffix
        if not stem:
            continue
        combined = f"{value}{stem}" if is_prefix else f"{stem}{value}"
        renamed.append(f"{combined}{ext}")
        old_names.append(f"{stem}{ext}")
    confirm_changes(old_names, renamed)
| 26.615873
| 88
| 0.634781
| 1,059
| 8,384
| 4.806421
| 0.100094
| 0.045383
| 0.062868
| 0.11002
| 0.818075
| 0.79391
| 0.778389
| 0.778389
| 0.744008
| 0.699214
| 0
| 0.000958
| 0.25334
| 8,384
| 314
| 89
| 26.700637
| 0.812141
| 0.065124
| 0
| 0.691244
| 0
| 0
| 0.154569
| 0.027284
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0553
| false
| 0
| 0.013825
| 0
| 0.069124
| 0.009217
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1ec11b6314f8de85041be81ac37b239eff09d230
| 140
|
py
|
Python
|
api/admin.py
|
prakash3720/django-rest
|
6c275ad0bb8b7b6d070a16073573d9996e846d48
|
[
"MIT"
] | null | null | null |
api/admin.py
|
prakash3720/django-rest
|
6c275ad0bb8b7b6d070a16073573d9996e846d48
|
[
"MIT"
] | 7
|
2020-06-06T01:37:36.000Z
|
2022-02-10T14:21:49.000Z
|
api/admin.py
|
prakash3720/django-rest
|
6c275ad0bb8b7b6d070a16073573d9996e846d48
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from api import models
# Expose the profile models in the Django admin site (same registration
# order as before).
for _model in (models.UserProfile, models.ProfileTodoItem):
    admin.site.register(_model)
| 28
| 43
| 0.85
| 19
| 140
| 6.263158
| 0.578947
| 0.151261
| 0.285714
| 0.386555
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 140
| 4
| 44
| 35
| 0.915385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
a201e0039c6c3efa71d928642d5cc3a7b9558fcd
| 95
|
py
|
Python
|
Arknights/__init__.py
|
LittleNightmare/ArknightsAutoHelper
|
c8df51f00e0d17c636f74ed58c4b16e12459ddbe
|
[
"MIT"
] | 1
|
2021-05-03T13:39:08.000Z
|
2021-05-03T13:39:08.000Z
|
Arknights/__init__.py
|
ZhouZiHao-Moon/ArknightsAutoHelper
|
3135b54d69f2255f99c13d42dc936065701c3129
|
[
"MIT"
] | null | null | null |
Arknights/__init__.py
|
ZhouZiHao-Moon/ArknightsAutoHelper
|
3135b54d69f2255f99c13d42dc936065701c3129
|
[
"MIT"
] | null | null | null |
from Arknights.base import ArknightsHelper
from Arknights.ArknightsShell import ArknightsShell
| 31.666667
| 51
| 0.894737
| 10
| 95
| 8.5
| 0.6
| 0.305882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084211
| 95
| 2
| 52
| 47.5
| 0.977011
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a216993c76debc9606b3b65cc3c396de09339c2e
| 10,379
|
py
|
Python
|
tests/test_sklearn_mlp_converter.py
|
RTHMaK/sklearn-onnx
|
dbbd4a04f0a395549b1e5465c5d65ceaef07a726
|
[
"MIT"
] | null | null | null |
tests/test_sklearn_mlp_converter.py
|
RTHMaK/sklearn-onnx
|
dbbd4a04f0a395549b1e5465c5d65ceaef07a726
|
[
"MIT"
] | null | null | null |
tests/test_sklearn_mlp_converter.py
|
RTHMaK/sklearn-onnx
|
dbbd4a04f0a395549b1e5465c5d65ceaef07a726
|
[
"MIT"
] | 1
|
2020-10-01T09:26:27.000Z
|
2020-10-01T09:26:27.000Z
|
"""
Tests scikit-learn's MLPClassifier and MLPRegressor converters.
"""
import unittest
from sklearn.neural_network import MLPClassifier, MLPRegressor
from skl2onnx import convert_sklearn
from skl2onnx.common.data_types import FloatTensorType, Int64TensorType
from skl2onnx.common.data_types import onnx_built_with_ml
from test_utils import (
dump_data_and_model,
fit_classification_model,
fit_multilabel_classification_model,
fit_regression_model,
)
class TestSklearnMLPConverters(unittest.TestCase):
    """Tests conversion of scikit-learn MLPClassifier/MLPRegressor to ONNX.

    Every test fits an MLP estimator, converts it with ``convert_sklearn``
    and dumps data + model so the runtime comparison harness can check the
    ONNX predictions against scikit-learn's.
    """

    # onnxruntime releases <= 0.2.1 are known to fail on these models; the
    # two adjacent string literals concatenate into one expression evaluated
    # by the dump harness.
    _ALLOW_FAILURE = ("StrictVersion("
                     "onnxruntime.__version__)<= StrictVersion('0.2.1')")

    def _convert_and_dump(self, model, X_test, description, basename,
                          tensor_type=FloatTensorType):
        """Convert a fitted estimator to ONNX and dump it for verification.

        :param model: fitted scikit-learn estimator.
        :param X_test: test matrix; its column count sizes the ONNX input.
        :param description: human-readable converter description.
        :param basename: basename for the dumped model/data artefacts.
        :param tensor_type: ONNX input tensor type (FloatTensorType or
            Int64TensorType).
        """
        model_onnx = convert_sklearn(
            model,
            description,
            [("input", tensor_type([None, X_test.shape[1]]))],
        )
        self.assertTrue(model_onnx is not None)
        dump_data_and_model(
            X_test,
            model,
            model_onnx,
            basename=basename,
            allow_failure=self._ALLOW_FAILURE,
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_model_mlp_classifier_binary(self):
        model, X_test = fit_classification_model(
            MLPClassifier(random_state=42), 2)
        self._convert_and_dump(model, X_test, "scikit-learn MLPClassifier",
                               "SklearnMLPClassifierBinary")

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_model_mlp_classifier_multiclass_default(self):
        model, X_test = fit_classification_model(
            MLPClassifier(random_state=42), 4)
        self._convert_and_dump(model, X_test, "scikit-learn MLPClassifier",
                               "SklearnMLPClassifierMultiClass")

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_model_mlp_classifier_multilabel_default(self):
        model, X_test = fit_multilabel_classification_model(
            MLPClassifier(random_state=42))
        self._convert_and_dump(model, X_test, "scikit-learn MLPClassifier",
                               "SklearnMLPClassifierMultiLabel")

    def test_model_mlp_regressor_default(self):
        model, X_test = fit_regression_model(
            MLPRegressor(random_state=42))
        self._convert_and_dump(model, X_test, "scikit-learn MLPRegressor",
                               "SklearnMLPRegressor-Dec4")

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_model_mlp_classifier_multiclass_identity(self):
        model, X_test = fit_classification_model(
            MLPClassifier(random_state=42, activation="identity"), 3,
            is_int=True)
        self._convert_and_dump(
            model, X_test, "scikit-learn MLPClassifier",
            "SklearnMLPClassifierMultiClassIdentityActivation",
            tensor_type=Int64TensorType)

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_model_mlp_classifier_multilabel_identity(self):
        model, X_test = fit_multilabel_classification_model(
            MLPClassifier(random_state=42, activation="identity"),
            is_int=True)
        self._convert_and_dump(
            model, X_test, "scikit-learn MLPClassifier",
            "SklearnMLPClassifierMultiLabelIdentityActivation",
            tensor_type=Int64TensorType)

    def test_model_mlp_regressor_identity(self):
        model, X_test = fit_regression_model(
            MLPRegressor(random_state=42, activation="identity"), is_int=True)
        self._convert_and_dump(
            model, X_test, "scikit-learn MLPRegressor",
            "SklearnMLPRegressorIdentityActivation-Dec4",
            tensor_type=Int64TensorType)

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_model_mlp_classifier_multiclass_logistic(self):
        model, X_test = fit_classification_model(
            MLPClassifier(random_state=42, activation="logistic"), 5)
        self._convert_and_dump(
            model, X_test, "scikit-learn MLPClassifier",
            "SklearnMLPClassifierMultiClassLogisticActivation")

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_model_mlp_classifier_multilabel_logistic(self):
        model, X_test = fit_multilabel_classification_model(
            MLPClassifier(random_state=42, activation="logistic"), n_classes=4)
        self._convert_and_dump(
            model, X_test, "scikit-learn MLPClassifier",
            "SklearnMLPClassifierMultiLabelLogisticActivation")

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_model_mlp_regressor_logistic(self):
        model, X_test = fit_regression_model(
            MLPRegressor(random_state=42, activation="logistic"))
        self._convert_and_dump(
            model, X_test, "scikit-learn MLPRegressor",
            "SklearnMLPRegressorLogisticActivation-Dec4")

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_model_mlp_classifier_multiclass_tanh(self):
        model, X_test = fit_classification_model(
            MLPClassifier(random_state=42, activation="tanh"), 3)
        self._convert_and_dump(
            model, X_test, "scikit-learn MLPClassifier",
            "SklearnMLPClassifierMultiClassTanhActivation")

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_model_mlp_classifier_multilabel_tanh(self):
        model, X_test = fit_multilabel_classification_model(
            MLPClassifier(random_state=42, activation="tanh"), n_labels=3)
        self._convert_and_dump(
            model, X_test, "scikit-learn MLPClassifier",
            "SklearnMLPClassifierMultiLabelTanhActivation")

    def test_model_mlp_regressor_tanh(self):
        model, X_test = fit_regression_model(
            MLPRegressor(random_state=42, activation="tanh"))
        self._convert_and_dump(
            model, X_test, "scikit-learn MLPRegressor",
            "SklearnMLPRegressorTanhActivation-Dec4")
# Allow the test module to be executed directly as a script.
if __name__ == "__main__":
    unittest.main()
| 37.200717
| 79
| 0.606995
| 1,008
| 10,379
| 5.893849
| 0.095238
| 0.032823
| 0.043764
| 0.037704
| 0.850025
| 0.84649
| 0.821747
| 0.821747
| 0.816361
| 0.814341
| 0
| 0.013646
| 0.293959
| 10,379
| 278
| 80
| 37.334532
| 0.79708
| 0.00607
| 0
| 0.692607
| 0
| 0
| 0.200621
| 0.110206
| 0
| 0
| 0
| 0
| 0.050584
| 1
| 0.050584
| false
| 0
| 0.023346
| 0
| 0.077821
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bf57903e93ae726b6448ff1fd294672dd49d3ed2
| 412,432
|
py
|
Python
|
ibmpairs/catalog.py
|
taylorsteffanj/ibmpairs
|
8892ac67ebebee300eaed4167cca8685f8efd82e
|
[
"BSD-3-Clause"
] | 1
|
2019-05-01T14:48:57.000Z
|
2019-05-01T14:48:57.000Z
|
ibmpairs/catalog.py
|
taylorsteffanj/ibmpairs
|
8892ac67ebebee300eaed4167cca8685f8efd82e
|
[
"BSD-3-Clause"
] | null | null | null |
ibmpairs/catalog.py
|
taylorsteffanj/ibmpairs
|
8892ac67ebebee300eaed4167cca8685f8efd82e
|
[
"BSD-3-Clause"
] | null | null | null |
"""
IBM PAIRS Catalog: A Python module to assist with the retrival, creation,
update and deletion of metadata in the IBM PAIRS catalog.
Copyright 2019-2021 Physical Analytics, IBM Research All Rights Reserved.
SPDX-License-Identifier: BSD-3-Clause
"""
# fold: Import Python Standard Library {{{
# Python Standard Library:
import json
import os
from typing import List, Any
import re
#}}}
# fold: Import ibmpairs Modules {{{
# ibmpairs Modules:
import ibmpairs.client as cl
import ibmpairs.common as common
import ibmpairs.constants as constants
from ibmpairs.logger import logger
import ibmpairs.messages as messages
#}}}
# fold: Import Third Party Libraries {{{
# Third Party Libraries:
import pandas as pd
try:
import rasterio
HAS_RASTERIO=True
except:
HAS_RASTERIO=False
from tableschema import Table
#}}}
#
class Category:
    #_id: int
    #_name: str
    """
    An object to represent a catalog category.
    :param id: category id
    :type id: int
    :param name: category name
    :type name: str
    """
    #
    def __str__(self):
        """
        The method creates a string representation of the internal class structure.
        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent    = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __repr__(self):
        """
        The method creates a dict representation of the internal class structure.
        :returns: A dict representation of the internal class structure.
        :rtype: dict
        """
        return json.dumps(self.to_dict(),
                          indent    = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __init__(self,
                 id: int = None,
                 name: str = None
                ):
        # Assign directly to the private attributes; validation happens in
        # the property setters, not here (matches the original behavior).
        self._id   = id
        self._name = name
    #
    def get_id(self):
        return self._id
    #
    def set_id(self, id):
        self._id = common.check_int(id)
    #
    def del_id(self):
        del self._id
    #
    id = property(get_id, set_id, del_id)
    #
    def get_name(self):
        return self._name
    #
    def set_name(self, name):
        self._name = common.check_str(name)
    #
    def del_name(self):
        del self._name
    #
    name = property(get_name, set_name, del_name)
    #
    def from_dict(category_dict: Any):
        """
        Create a Category object from a dictionary.
        :param category_dict: A dictionary that contains the keys of a Category.
        :type category_dict: Any
        :rtype: ibmpairs.catalog.Category
        :raises Exception: if not a dictionary.
        """
        id   = None
        name = None
        common.check_dict(category_dict)
        # dict.get() returns None for absent keys, so a single None check
        # covers both "key missing" and "key present but null".
        if category_dict.get("id") is not None:
            id = common.check_int(category_dict.get("id"))
        if category_dict.get("name") is not None:
            name = common.check_str(category_dict.get("name"))
        return Category(id   = id,
                        name = name
                       )
    #
    def to_dict(self):
        """
        Create a dictionary from the objects structure.
        :rtype: dict
        """
        category_dict: dict = {}
        if self._id is not None:
            category_dict["id"] = self._id
        if self._name is not None:
            category_dict["name"] = self._name
        return category_dict
    #
    def from_json(category_json: Any):
        """
        Create a Category object from json (dictonary or str).
        :param category_json: A json dictionary that contains the keys of a Category or a string representation of a json dictionary.
        :type category_json: Any
        :rtype: ibmpairs.catalog.Category
        :raises Exception: if not a dictionary or a string.
        """
        if isinstance(category_json, dict):
            category = Category.from_dict(category_json)
        elif isinstance(category_json, str):
            category_dict = json.loads(category_json)
            category = Category.from_dict(category_dict)
        else:
            # BUG FIX: previously formatted with the unbound local 'category',
            # which raised NameError instead of the intended PAWException.
            msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(category_json), "category")
            logger.error(msg)
            raise common.PAWException(msg)
        return category
    #
    def to_json(self):
        """
        Create a string representation of a json dictionary from the objects structure.
        :rtype: string
        """
        return json.dumps(self.to_dict())
#
class Properties:
    #_sector: List[str]
    #_application: List[str]
    #_domain: List[str]
    #_type: List[str]
    #_source: List[str]
    """
    An object to represent a list of catalog properties.
    :param sector: A list of sectors
    :type sector: List[str]
    :param application: A list of applications
    :type application: List[str]
    :param domain: A list of domains
    :type domain: List[str]
    :param type: A list of types
    :type type: List[str]
    :param source: A list of sources
    :type source: List[str]
    """
    #
    def __str__(self):
        """
        The method creates a string representation of the internal class structure.
        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent    = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __repr__(self):
        """
        The method creates a dict representation of the internal class structure.
        :returns: A dict representation of the internal class structure.
        :rtype: dict
        """
        return json.dumps(self.to_dict(),
                          indent    = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __init__(self,
                 sector: List[str] = None,
                 application: List[str] = None,
                 domain: List[str] = None,
                 type: List[str] = None,
                 source: List[str] = None
                ):
        self._sector      = sector
        self._application = application
        self._domain      = domain
        self._type        = type
        self._source      = source
    #
    def get_sector(self):
        return self._sector
    #
    def set_sector(self, sector):
        self._sector = common.check_class(sector, List[str])
    #
    def del_sector(self):
        del self._sector
    #
    sector = property(get_sector, set_sector, del_sector)
    #
    def get_application(self):
        return self._application
    #
    def set_application(self, application):
        self._application = common.check_class(application, List[str])
    #
    def del_application(self):
        del self._application
    #
    application = property(get_application, set_application, del_application)
    #
    def get_domain(self):
        return self._domain
    #
    def set_domain(self, domain):
        self._domain = common.check_class(domain, List[str])
    #
    def del_domain(self):
        del self._domain
    #
    domain = property(get_domain, set_domain, del_domain)
    #
    def get_type(self):
        return self._type
    #
    def set_type(self, type):
        self._type = common.check_class(type, List[str])
    #
    def del_type(self):
        del self._type
    #
    type = property(get_type, set_type, del_type)
    #
    def get_source(self):
        return self._source
    #
    def set_source(self, source):
        self._source = common.check_class(source, List[str])
    #
    def del_source(self):
        del self._source
    #
    source = property(get_source, set_source, del_source)
    #
    def from_dict(properties_dict: Any):
        """
        Create a Properties object from a dictionary.
        :param properties_dict: A dictionary that contains the keys of a Properties.
        :type properties_dict: Any
        :rtype: ibmpairs.catalog.Properties
        :raises Exception: if not a dictionary.
        """
        common.check_dict(properties_dict)
        #
        def _string_list(primary, fallback):
            # Prefer the capitalised key; the lower-case variant is only
            # consulted when the capitalised key is absent altogether (a
            # present-but-null capitalised key still blocks the fallback,
            # matching the original if/elif behavior).
            if primary in properties_dict:
                value = properties_dict.get(primary)
            elif fallback in properties_dict:
                value = properties_dict.get(fallback)
            else:
                return None
            if value is None:
                return None
            return common.from_list(value, common.check_str)
        #
        return Properties(sector      = _string_list("Sector", "sector"),
                          application = _string_list("Application", "application"),
                          domain      = _string_list("Domain", "domain"),
                          type        = _string_list("Type", "type"),
                          source      = _string_list("Source", "source")
                         )
    #
    def to_dict(self):
        """
        Create a dictionary from the objects structure.
        :rtype: dict
        """
        properties_dict: dict = {}
        if self._sector is not None:
            properties_dict["sector"] = common.from_list(self._sector, common.check_str)
        if self._application is not None:
            properties_dict["application"] = common.from_list(self._application, common.check_str)
        if self._domain is not None:
            properties_dict["domain"] = common.from_list(self._domain, common.check_str)
        if self._type is not None:
            properties_dict["type"] = common.from_list(self._type, common.check_str)
        if self._source is not None:
            properties_dict["source"] = common.from_list(self._source, common.check_str)
        return properties_dict
    #
    def from_json(properties_json: Any):
        """
        Create a Properties object from json (dictonary or str).
        :param properties_json: A json dictionary that contains the keys of a Properties or a string representation of a json dictionary.
        :type properties_json: Any
        :rtype: ibmpairs.catalog.Properties
        :raises Exception: if not a dictionary or a string.
        """
        if isinstance(properties_json, dict):
            properties = Properties.from_dict(properties_json)
        elif isinstance(properties_json, str):
            properties_dict = json.loads(properties_json)
            properties = Properties.from_dict(properties_dict)
        else:
            # BUG FIX: previously formatted with the unbound local
            # 'properties', which raised NameError instead of the intended
            # PAWException.
            msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(properties_json), "properties")
            logger.error(msg)
            raise common.PAWException(msg)
        return properties
    #
    def to_json(self):
        """
        Create a string representation of a json dictionary from the objects structure.
        :rtype: string
        """
        return json.dumps(self.to_dict())
class SpatialCoverage:
    #_country: List[str]
    """
    An object to represent a catalog spatial coverage.
    :param country: A list of countries
    :type country: List[str]
    """
    #
    def __str__(self):
        """
        The method creates a string representation of the internal class structure.
        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent    = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __repr__(self):
        """
        The method creates a dict representation of the internal class structure.
        :returns: A dict representation of the internal class structure.
        :rtype: dict
        """
        return json.dumps(self.to_dict(),
                          indent    = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __init__(self,
                 country: List[str] = None
                ):
        self._country = country
    #
    def get_country(self):
        return self._country
    #
    def set_country(self, country):
        self._country = common.check_class(country, List[str])
    #
    def del_country(self):
        del self._country
    #
    country = property(get_country, set_country, del_country)
    #
    def from_dict(spatial_coverage_dict: Any):
        """
        Create a SpatialCoverage object from a dictionary.
        :param spatial_coverage_dict: A dictionary that contains the keys of a SpatialCoverage.
        :type spatial_coverage_dict: Any
        :rtype: ibmpairs.catalog.SpatialCoverage
        :raises Exception: if not a dictionary.
        """
        country = None
        common.check_dict(spatial_coverage_dict)
        # "Country" takes precedence; "country" is only read when the
        # capitalised key is absent altogether.
        if "Country" in spatial_coverage_dict:
            if spatial_coverage_dict.get("Country") is not None:
                country = common.from_list(spatial_coverage_dict.get("Country"), common.check_str)
        elif "country" in spatial_coverage_dict:
            if spatial_coverage_dict.get("country") is not None:
                country = common.from_list(spatial_coverage_dict.get("country"), common.check_str)
        return SpatialCoverage(country)
    #
    def to_dict(self):
        """
        Create a dictionary from the objects structure.
        :rtype: dict
        """
        spatial_coverage_dict: dict = {}
        if self._country is not None:
            spatial_coverage_dict["country"] = common.from_list(self._country, common.check_str)
        return spatial_coverage_dict
    #
    def from_json(spatial_coverage_json: Any):
        """
        Create a SpatialCoverage object from json (dictonary or str).
        :param spatial_coverage_json: A json dictionary that contains the keys of a SpatialCoverage or a string representation of a json dictionary.
        :type spatial_coverage_json: Any
        :rtype: ibmpairs.catalog.SpatialCoverage
        :raises Exception: if not a dictionary or a string.
        """
        if isinstance(spatial_coverage_json, dict):
            spatial_coverage = SpatialCoverage.from_dict(spatial_coverage_json)
        elif isinstance(spatial_coverage_json, str):
            spatial_coverage_dict = json.loads(spatial_coverage_json)
            spatial_coverage = SpatialCoverage.from_dict(spatial_coverage_dict)
        else:
            # BUG FIX: previously formatted with the unbound local
            # 'spatial_coverage', which raised NameError instead of the
            # intended PAWException.
            msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(spatial_coverage_json), "spatial_coverage")
            logger.error(msg)
            raise common.PAWException(msg)
        return spatial_coverage
    #
    def to_json(self):
        """
        Create a string representation of a json dictionary from the objects structure.
        :rtype: string
        """
        return json.dumps(self.to_dict())
#
class DataSetReturn:
    #_data_set_id: str
    #_status: int
    #_message: str
    """
    An object to represent the response from a DataSet object call.
    :param data_set_id: A data set id.
    :type data_set_id: str
    :param status: A status code.
    :type status: int
    :param message: A status message from the call.
    :type message: str
    """
    #
    def __str__(self):
        """
        The method creates a string representation of the internal class structure.
        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent    = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __repr__(self):
        """
        The method creates a dict representation of the internal class structure.
        :returns: A dict representation of the internal class structure.
        :rtype: dict
        """
        return json.dumps(self.to_dict(),
                          indent    = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __init__(self,
                 data_set_id: str = None,
                 status: int = None,
                 message: str = None
                ):
        self._data_set_id = data_set_id
        self._status      = status
        self._message     = message
    #
    def get_data_set_id(self):
        return self._data_set_id
    #
    def set_data_set_id(self, data_set_id):
        self._data_set_id = common.check_str(data_set_id)
    #
    def del_data_set_id(self):
        del self._data_set_id
    #
    data_set_id = property(get_data_set_id, set_data_set_id, del_data_set_id)
    #
    def get_status(self):
        return self._status
    #
    def set_status(self, status):
        self._status = common.check_int(status)
    #
    def del_status(self):
        del self._status
    #
    status = property(get_status, set_status, del_status)
    #
    def get_message(self):
        return self._message
    #
    def set_message(self, message):
        self._message = common.check_str(message)
    #
    def del_message(self):
        del self._message
    #
    message = property(get_message, set_message, del_message)
    #
    def from_dict(data_set_return_dict: Any):
        """
        Create a DataSetReturn object from a dictionary.
        :param data_set_return_dict: A dictionary that contains the keys of a DataSetReturn.
        :type data_set_return_dict: Any
        :rtype: ibmpairs.catalog.DataSetReturn
        :raises Exception: if not a dictionary.
        """
        data_set_id = None
        status      = None
        message     = None
        common.check_dict(data_set_return_dict)
        # The server returns "datasetId"; "data_set_id" is accepted as the
        # snake_case round-trip form produced by to_dict().
        if "datasetId" in data_set_return_dict:
            if data_set_return_dict.get("datasetId") is not None:
                data_set_id = common.check_str(data_set_return_dict.get("datasetId"))
        elif "data_set_id" in data_set_return_dict:
            if data_set_return_dict.get("data_set_id") is not None:
                data_set_id = common.check_str(data_set_return_dict.get("data_set_id"))
        if data_set_return_dict.get("status") is not None:
            status = common.check_int(data_set_return_dict.get("status"))
        if data_set_return_dict.get("message") is not None:
            message = common.check_str(data_set_return_dict.get("message"))
        return DataSetReturn(data_set_id = data_set_id,
                             status      = status,
                             message     = message
                            )
    #
    def to_dict(self):
        """
        Create a dictionary from the objects structure.
        :rtype: dict
        """
        data_set_return_dict: dict = {}
        # Consistency fix: read the private attribute directly, matching the
        # other fields (previously read via the public property).
        if self._data_set_id is not None:
            data_set_return_dict["data_set_id"] = self._data_set_id
        if self._status is not None:
            data_set_return_dict["status"] = self._status
        if self._message is not None:
            data_set_return_dict["message"] = self._message
        return data_set_return_dict
    #
    def from_json(data_set_return_json: Any):
        """
        Create a DataSetReturn object from json (dictonary or str).
        :param data_set_return_json: A json dictionary that contains the keys of a DataSetReturn or a string representation of a json dictionary.
        :type data_set_return_json: Any
        :rtype: ibmpairs.catalog.DataSetReturn
        :raises Exception: if not a dictionary or a string.
        """
        if isinstance(data_set_return_json, dict):
            data_set_return = DataSetReturn.from_dict(data_set_return_json)
        elif isinstance(data_set_return_json, str):
            data_set_return_dict = json.loads(data_set_return_json)
            data_set_return = DataSetReturn.from_dict(data_set_return_dict)
        else:
            msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(data_set_return_json), "data_set_return_json")
            logger.error(msg)
            raise common.PAWException(msg)
        return data_set_return
    #
    def to_json(self):
        """
        Create a string representation of a json dictionary from the objects structure.
        :rtype: string
        """
        return json.dumps(self.to_dict())
#
class DataSet:
#
#_client: cl.Client
# Common
#_name: str
#_category: Category
#_max_layers: int
#_name_alternate: str
#_rating: float
#_description_short: str
#_description_long: str
#_description_links: List[str]
#_data_source_name: str
#_data_source_attribution: str
#_data_source_description: str
#_data_source_links: List[str]
#_update_interval_max: str
#_update_interval_description: str
#_lag_horizon: str
#_lag_horizon_description: str
#_temporal_resolution: str
#_temporal_resolution_description: str
#_spatial_resolution_of_raw_data: str
#_interpolation: str
#_dimensions_description: str
#_permanence: bool
#_permanence_description: str
#_known_issues: str
#_responsible_organization: str
#_properties: Properties
#_spatial_coverage: SpatialCoverage
#_latitude_min: float
#_longitude_min: float
#_latitude_max: float
#_longitude_max: float
#_temporal_min: str # datetime?
#_temporal_max: str # datetime?
# Get Exclusive
# (GET /v2/datasets/{dataset_id})
#_id: str
#_key: str
#_dsource_h_link: str
#_dsource_desc: str
#_status: str
#_data_origin: str
#_created_at: str
#_updated_at: str
# Create Exclusive
# (POST /v2/datasets/{dataset_id})
# N/A
# Update Exclusive
# (PUT /v2/datasets/{dataset_id})
# N/A
# Create & Get Common
# (POST /v2/datasets/{dataset_id})
# (GET /v2/datasets/{dataset_id})
#_level: int
#_crs: str
#_offering_status: str
# Create & Update Common
# (POST /v2/datasets/{dataset_id})
# (PUT /v2/datasets/{dataset_id})
#_contact_person: str
#_description_internal: str
#_description_internal_links: List[str]
#_data_storage_mid_term: str
#_data_storage_long_term: str
#_elt_scripts_links: List[str]
#_license_information: str
# Get & Update Common
# (GET /v2/datasets/{dataset_id})
# (PUT /v2/datasets/{dataset_id})
# N/A
# Internal
# data_set_response: DataSetReturn
"""
An object to represent an IBM PAIRS Data Set.
:param client: An IBM PAIRS Client.
:type client: ibmpairs.client.Client
:param name: Data Set name.
:type name: str
:param category: A category entry.
:type category: ibmpairs.catalog.Category
:param max_layers: The maximum number of Data Layers the Data Set can contain.
:type max_layers: int
:param name_alternate: Alternative Data Set name.
:type name_alternate: str
:param rating: Rating.
:type rating: float
:param description_short: Short description of the Data Set.
:type description_short: str
:param description_long: Long description of the Data Set.
:type description_long: str
:param description_links: A list of URLs with supporting documentation.
:type description_links: List[str]
:param data_source_name: A name for the origin data source.
:type data_source_name: str
:param data_source_attribution: An attribution for the origin data source.
:type data_source_attribution: str
:param data_source_description: A description of the origin data source.
:type data_source_description: str
:param data_source_links: A list of URLs with supporting documentation of the origin data source.
:type data_source_links: List[str]
:param update_interval_max: The maximum interval of an update to the Data Set.
:type update_interval_max: str
:param update_interval_description: A description of the maximum update interval.
:type update_interval_description: str
:param lag_horizon: Lag horizon of the Data Set.
:type lag_horizon: str
:param lag_horizon_description: Lag horizon description.
:type lag_horizon_description: str
:param temporal_resolution: The temporal resolution of the Data Set.
:type temporal_resolution: str
:param temporal_resolution_description: A description of the temporal resolution.
:type temporal_resolution_description: str
:param spatial_resolution_of_raw_data: Spatial resolution of the raw data.
:type spatial_resolution_of_raw_data: str
:param interpolation: Interpolation.
:type interpolation: str
:param dimensions_description: A description of the dimensions.
:type dimensions_description: str
:param permanence: Permanence.
:type permanence: bool
:param permanence_description: A description of the permanence value.
:type permanence_description: str
:param known_issues: Known issues with the data.
:type known_issues: str
:param responsible_organization: An organization responsible for the data.
:type responsible_organization: str
:param properties: A properties entry.
:type properties: ibmpairs.catalog.Properties
:param spatial_coverage: A spatial coverage entry.
:type spatial_coverage: ibmpairs.catalog.SpatialCoverage
:param latitude_min: The minimum latitude of the Data Set.
:type latitude_min: float
:param longitude_min: The minimum longitude of the Data Set.
:type longitude_min: float
:param latitude_max: The maximum latitude of the Data Set.
:type latitude_max: float
:param longitude_max: The maximum longitude of the Data Set.
:type longitude_max: float
:param temporal_min: The minimum temporal value of the Data Set.
:type temporal_min: str
:param temporal_max: The maximum temporal value of the Data Set.
:type temporal_max: str
:param id: The Data Set ID.
:type id: str
:param key: The Data Set key.
:type key: str
:param dsource_h_link: Data source hyperlink.
:type dsource_h_link: str
:param dsource_desc: Data source description.
:type dsource_desc: str
:param status: Data Set status.
:type status: str
:param data_origin: The origin of the data contained within the Data Set.
:type data_origin: str
:param created_at: The date of creation.
:type created_at: str
:param updated_at: The last updated date.
:type updated_at: str
:param level: The default IBM PAIRS level for the Data Set.
:type level: int
:param crs: CRS.
:type crs: str
:param offering_status: The legal status of the offering.
:type offering_status: str
:param contact_person: A contact person for the Data Set.
:type contact_person: str
:param description_internal: An internal description of the Data Set.
:type description_internal: str
:param description_internal_links: A list of links that give context to the description_internal.
:type description_internal_links: List[str]
:param data_storage_mid_term: The mid term data storage for the Data Set.
:type data_storage_mid_term: str
:param data_storage_long_term: The lon term data storage for the Data Set.
:type data_storage_long_term: str
:param elt_scripts_links: Extract Load Transform script links for the Data Set.
:type elt_scripts_links: List[str]
:param license_information: License information for data in the Data Set.
:type license_information: str
:param data_set_response: A server response to a executed Data Set method call.
:type data_set_response: ibmpairs.catalog.DataSetReturn
:raises Exception: An ibmpairs.client.Client is not found.
"""
#
def __str__(self):
"""
The method creates a string representation of the internal class structure.
:returns: A string representation of the internal class structure.
:rtype: str
"""
return json.dumps(self.to_dict(),
indent = constants.GLOBAL_JSON_REPR_INDENT,
sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
#
    def __repr__(self):
        """
        The method creates a string representation of the internal class structure.

        NOTE(review): despite the historical wording ("dict representation"),
        this returns the same pretty-printed JSON *string* as ``__str__``,
        not a dict.

        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
#
    def __init__(self,
                 client: cl.Client = None,
                 name: str = None,
                 category: Category = None,
                 max_layers: int = None,
                 name_alternate: str = None,
                 rating: float = None,
                 description_short: str = None,
                 description_long: str = None,
                 description_links: List[str] = None,
                 data_source_name: str = None,
                 data_source_attribution: str = None,
                 data_source_description: str = None,
                 data_source_links: List[str] = None,
                 update_interval_max: str = None,
                 update_interval_description: str = None,
                 lag_horizon: str = None,
                 lag_horizon_description: str = None,
                 temporal_resolution: str = None,
                 temporal_resolution_description: str = None,
                 spatial_resolution_of_raw_data: str = None,
                 interpolation: str = None,
                 dimensions_description: str = None,
                 permanence: bool = None,
                 permanence_description: str = None,
                 known_issues: str = None,
                 responsible_organization: str = None,
                 properties: Properties = None,
                 spatial_coverage: SpatialCoverage = None,
                 latitude_min: float = None,
                 longitude_min: float = None,
                 latitude_max: float = None,
                 longitude_max: float = None,
                 temporal_min: str = None,
                 temporal_max: str = None,
                 id: str = None,
                 key: str = None,
                 dsource_h_link: str = None,
                 dsource_desc: str = None,
                 status: str = None,
                 data_origin: str = None,
                 created_at: str = None,
                 updated_at: str = None,
                 level: int = None,
                 crs: str = None,
                 offering_status: str = None,
                 contact_person: str = None,
                 description_internal: str = None,
                 description_internal_links: List[str] = None,
                 data_storage_mid_term: str = None,
                 data_storage_long_term: str = None,
                 elt_scripts_links: List[str] = None,
                 license_information: str = None,
                 data_set_response: DataSetReturn = None
                ):
        """
        Initialize a DataSet.

        All arguments are optional and are stored as-is on private attributes
        (see the class docstring for the meaning of each field). The stored
        values are exposed through the ``property`` accessors defined below.
        """
        # Resolve the client: an explicitly supplied client takes precedence
        # over the module-level global PAIRS client.
        self._client = common.set_client(input_client = client,
                                         global_client = cl.GLOBAL_PAIRS_CLIENT)
        self._name = name
        self._category = category
        self._max_layers = max_layers
        self._name_alternate = name_alternate
        self._rating = rating
        self._description_short = description_short
        self._description_long = description_long
        self._description_links = description_links
        self._data_source_name = data_source_name
        self._data_source_attribution = data_source_attribution
        self._data_source_description = data_source_description
        self._data_source_links = data_source_links
        self._update_interval_max = update_interval_max
        self._update_interval_description = update_interval_description
        self._lag_horizon = lag_horizon
        self._lag_horizon_description = lag_horizon_description
        self._temporal_resolution = temporal_resolution
        self._temporal_resolution_description = temporal_resolution_description
        self._spatial_resolution_of_raw_data = spatial_resolution_of_raw_data
        self._interpolation = interpolation
        self._dimensions_description = dimensions_description
        self._permanence = permanence
        self._permanence_description = permanence_description
        self._known_issues = known_issues
        self._responsible_organization = responsible_organization
        self._properties = properties
        self._spatial_coverage = spatial_coverage
        self._latitude_min = latitude_min
        self._longitude_min = longitude_min
        self._latitude_max = latitude_max
        self._longitude_max = longitude_max
        self._temporal_min = temporal_min
        self._temporal_max = temporal_max
        self._id = id
        self._key = key
        self._dsource_h_link = dsource_h_link
        self._dsource_desc = dsource_desc
        self._status = status
        self._data_origin = data_origin
        self._created_at = created_at
        self._updated_at = updated_at
        self._level = level
        self._crs = crs
        self._offering_status = offering_status
        self._contact_person = contact_person
        self._description_internal = description_internal
        self._description_internal_links = description_internal_links
        self._data_storage_mid_term = data_storage_mid_term
        self._data_storage_long_term = data_storage_long_term
        self._elt_scripts_links = elt_scripts_links
        self._license_information = license_information
        # Guarantee a response holder always exists, even before any
        # server-backed method has been executed.
        if data_set_response is None:
            self._data_set_response = DataSetReturn()
        else:
            self._data_set_response = data_set_response
#
    # ------------------------------------------------------------------
    # Accessor boilerplate: each attribute is exposed as a ``property``
    # built from explicit get_/set_/del_ methods. Setters validate or
    # coerce the incoming value through the common.check_* helpers
    # (check_str / check_int / check_float / check_bool / check_class),
    # so assigning an incompatible value raises rather than being stored.
    # ------------------------------------------------------------------
    def get_client(self):
        return self._client
    #
    def set_client(self, c):
        self._client = common.check_class(c, cl.Client)
    #
    def del_client(self):
        del self._client
    #
    client = property(get_client, set_client, del_client)
    #
    def get_name(self):
        return self._name
    #
    def set_name(self, name):
        self._name = common.check_str(name)
    #
    def del_name(self):
        del self._name
    #
    name = property(get_name, set_name, del_name)
    #
    def get_category(self):
        return self._category
    #
    def set_category(self, category):
        self._category = common.check_class(category, Category)
    #
    def del_category(self):
        del self._category
    #
    category = property(get_category, set_category, del_category)
    #
    def get_max_layers(self):
        return self._max_layers
    #
    def set_max_layers(self, max_layers):
        self._max_layers = common.check_int(max_layers)
    #
    def del_max_layers(self):
        del self._max_layers
    #
    max_layers = property(get_max_layers, set_max_layers, del_max_layers)
    #
    def get_name_alternate(self):
        return self._name_alternate
    #
    def set_name_alternate(self, name_alternate):
        self._name_alternate = common.check_str(name_alternate)
    #
    def del_name_alternate(self):
        del self._name_alternate
    #
    name_alternate = property(get_name_alternate, set_name_alternate, del_name_alternate)
    #
    def get_rating(self):
        return self._rating
    #
    def set_rating(self, rating):
        self._rating = common.check_float(rating)
    #
    def del_rating(self):
        del self._rating
    #
    rating = property(get_rating, set_rating, del_rating)
    #
    def get_description_short(self):
        return self._description_short
    #
    def set_description_short(self, description_short):
        self._description_short = common.check_str(description_short)
    #
    def del_description_short(self):
        del self._description_short
    #
    description_short = property(get_description_short, set_description_short, del_description_short)
    #
    def get_description_long(self):
        return self._description_long
    #
    def set_description_long(self, description_long):
        self._description_long = common.check_str(description_long)
    #
    def del_description_long(self):
        del self._description_long
    #
    description_long = property(get_description_long, set_description_long, del_description_long)
    #
    def get_description_links(self):
        return self._description_links
    #
    def set_description_links(self, description_links):
        # NOTE(review): validates against the typing generic List[str];
        # confirm common.check_class supports typing generics — a plain
        # isinstance(x, List[str]) would raise TypeError.
        self._description_links = common.check_class(description_links, List[str])
    #
    def del_description_links(self):
        del self._description_links
    #
    description_links = property(get_description_links, set_description_links, del_description_links)
    #
    def get_data_source_name(self):
        return self._data_source_name
    #
    def set_data_source_name(self, data_source_name):
        self._data_source_name = common.check_str(data_source_name)
    #
    def del_data_source_name(self):
        del self._data_source_name
    #
    data_source_name = property(get_data_source_name, set_data_source_name, del_data_source_name)
    #
    def get_data_source_attribution(self):
        return self._data_source_attribution
    #
    def set_data_source_attribution(self, data_source_attribution):
        self._data_source_attribution = common.check_str(data_source_attribution)
    #
    def del_data_source_attribution(self):
        del self._data_source_attribution
    #
    data_source_attribution = property(get_data_source_attribution, set_data_source_attribution, del_data_source_attribution)
    #
    def get_data_source_description(self):
        return self._data_source_description
    #
    def set_data_source_description(self, data_source_description):
        self._data_source_description = common.check_str(data_source_description)
    #
    def del_data_source_description(self):
        del self._data_source_description
    #
    data_source_description = property(get_data_source_description, set_data_source_description, del_data_source_description)
    #
    def get_data_source_links(self):
        return self._data_source_links
    #
    def set_data_source_links(self, data_source_links):
        self._data_source_links = common.check_class(data_source_links, List[str])
    #
    def del_data_source_links(self):
        del self._data_source_links
    #
    data_source_links = property(get_data_source_links, set_data_source_links, del_data_source_links)
    #
    def get_update_interval_max(self):
        return self._update_interval_max
    #
    def set_update_interval_max(self, update_interval_max):
        self._update_interval_max = common.check_str(update_interval_max)
    #
    def del_update_interval_max(self):
        del self._update_interval_max
    #
    update_interval_max = property(get_update_interval_max, set_update_interval_max, del_update_interval_max)
    #
    def get_update_interval_description(self):
        return self._update_interval_description
    #
    def set_update_interval_description(self, update_interval_description):
        self._update_interval_description = common.check_str(update_interval_description)
    #
    def del_update_interval_description(self):
        del self._update_interval_description
    #
    update_interval_description = property(get_update_interval_description, set_update_interval_description, del_update_interval_description)
    #
    def get_lag_horizon(self):
        return self._lag_horizon
    #
    def set_lag_horizon(self, lag_horizon):
        self._lag_horizon = common.check_str(lag_horizon)
    #
    def del_lag_horizon(self):
        del self._lag_horizon
    #
    lag_horizon = property(get_lag_horizon, set_lag_horizon, del_lag_horizon)
    #
    def get_lag_horizon_description(self):
        return self._lag_horizon_description
    #
    def set_lag_horizon_description(self, lag_horizon_description):
        self._lag_horizon_description = common.check_str(lag_horizon_description)
    #
    def del_lag_horizon_description(self):
        del self._lag_horizon_description
    #
    lag_horizon_description = property(get_lag_horizon_description, set_lag_horizon_description, del_lag_horizon_description)
    #
    def get_temporal_resolution(self):
        return self._temporal_resolution
    #
    def set_temporal_resolution(self, temporal_resolution):
        self._temporal_resolution = common.check_str(temporal_resolution)
    #
    def del_temporal_resolution(self):
        del self._temporal_resolution
    #
    temporal_resolution = property(get_temporal_resolution, set_temporal_resolution, del_temporal_resolution)
    #
    def get_temporal_resolution_description(self):
        return self._temporal_resolution_description
    #
    def set_temporal_resolution_description(self, temporal_resolution_description):
        self._temporal_resolution_description = common.check_str(temporal_resolution_description)
    #
    def del_temporal_resolution_description(self):
        del self._temporal_resolution_description
    #
    temporal_resolution_description = property(get_temporal_resolution_description, set_temporal_resolution_description, del_temporal_resolution_description)
    #
    def get_spatial_resolution_of_raw_data(self):
        return self._spatial_resolution_of_raw_data
    #
    def set_spatial_resolution_of_raw_data(self, spatial_resolution_of_raw_data):
        self._spatial_resolution_of_raw_data = common.check_str(spatial_resolution_of_raw_data)
    #
    def del_spatial_resolution_of_raw_data(self):
        del self._spatial_resolution_of_raw_data
    #
    spatial_resolution_of_raw_data = property(get_spatial_resolution_of_raw_data, set_spatial_resolution_of_raw_data, del_spatial_resolution_of_raw_data)
    #
    def get_interpolation(self):
        return self._interpolation
    #
    def set_interpolation(self, interpolation):
        self._interpolation = common.check_str(interpolation)
    #
    def del_interpolation(self):
        del self._interpolation
    #
    interpolation = property(get_interpolation, set_interpolation, del_interpolation)
    #
    def get_dimensions_description(self):
        return self._dimensions_description
    #
    def set_dimensions_description(self, dimensions_description):
        self._dimensions_description = common.check_str(dimensions_description)
    #
    def del_dimensions_description(self):
        del self._dimensions_description
    #
    dimensions_description = property(get_dimensions_description, set_dimensions_description, del_dimensions_description)
    #
    def get_permanence(self):
        return self._permanence
    #
    def set_permanence(self, permanence):
        self._permanence = common.check_bool(permanence)
    #
    def del_permanence(self):
        del self._permanence
    #
    permanence = property(get_permanence, set_permanence, del_permanence)
    #
    def get_permanence_description(self):
        return self._permanence_description
    #
    def set_permanence_description(self, permanence_description):
        self._permanence_description = common.check_str(permanence_description)
    #
    def del_permanence_description(self):
        del self._permanence_description
    #
    permanence_description = property(get_permanence_description, set_permanence_description, del_permanence_description)
    #
    def get_known_issues(self):
        return self._known_issues
    #
    def set_known_issues(self, known_issues):
        self._known_issues = common.check_str(known_issues)
    #
    def del_known_issues(self):
        del self._known_issues
    #
    known_issues = property(get_known_issues, set_known_issues, del_known_issues)
    #
    def get_responsible_organization(self):
        return self._responsible_organization
    #
    def set_responsible_organization(self, responsible_organization):
        self._responsible_organization = common.check_str(responsible_organization)
    #
    def del_responsible_organization(self):
        del self._responsible_organization
    #
    responsible_organization = property(get_responsible_organization, set_responsible_organization, del_responsible_organization)
    #
    def get_properties(self):
        return self._properties
    #
    def set_properties(self, properties):
        self._properties = common.check_class(properties, Properties)
    #
    def del_properties(self):
        del self._properties
    #
    properties = property(get_properties, set_properties, del_properties)
    #
    def get_spatial_coverage(self):
        return self._spatial_coverage
    #
    def set_spatial_coverage(self, spatial_coverage):
        self._spatial_coverage = common.check_class(spatial_coverage, SpatialCoverage)
    #
    def del_spatial_coverage(self):
        del self._spatial_coverage
    #
    spatial_coverage = property(get_spatial_coverage, set_spatial_coverage, del_spatial_coverage)
    #
    def get_latitude_min(self):
        return self._latitude_min
    #
    def set_latitude_min(self, latitude_min):
        self._latitude_min = common.check_float(latitude_min)
    #
    def del_latitude_min(self):
        del self._latitude_min
    #
    latitude_min = property(get_latitude_min, set_latitude_min, del_latitude_min)
    #
    def get_longitude_min(self):
        return self._longitude_min
    #
    def set_longitude_min(self, longitude_min):
        self._longitude_min = common.check_float(longitude_min)
    #
    def del_longitude_min(self):
        del self._longitude_min
    #
    longitude_min = property(get_longitude_min, set_longitude_min, del_longitude_min)
    #
    def get_latitude_max(self):
        return self._latitude_max
    #
    def set_latitude_max(self, latitude_max):
        self._latitude_max = common.check_float(latitude_max)
    #
    def del_latitude_max(self):
        del self._latitude_max
    #
    latitude_max = property(get_latitude_max, set_latitude_max, del_latitude_max)
    #
    def get_longitude_max(self):
        return self._longitude_max
    #
    def set_longitude_max(self, longitude_max):
        self._longitude_max = common.check_float(longitude_max)
    #
    def del_longitude_max(self):
        del self._longitude_max
    #
    longitude_max = property(get_longitude_max, set_longitude_max, del_longitude_max)
    #
    def get_temporal_min(self):
        return self._temporal_min
    #
    def set_temporal_min(self, temporal_min):
        self._temporal_min = common.check_str(temporal_min)
    #
    def del_temporal_min(self):
        del self._temporal_min
    #
    temporal_min = property(get_temporal_min, set_temporal_min, del_temporal_min)
    #
    def get_temporal_max(self):
        return self._temporal_max
    #
    def set_temporal_max(self, temporal_max):
        self._temporal_max = common.check_str(temporal_max)
    #
    def del_temporal_max(self):
        del self._temporal_max
    #
    temporal_max = property(get_temporal_max, set_temporal_max, del_temporal_max)
    #
    def get_id(self):
        return self._id
    #
    def set_id(self, id):
        self._id = common.check_str(id)
    #
    def del_id(self):
        del self._id
    #
    id = property(get_id, set_id, del_id)
    #
    def get_key(self):
        return self._key
    #
    def set_key(self, key):
        self._key = common.check_str(key)
    #
    def del_key(self):
        del self._key
    #
    key = property(get_key, set_key, del_key)
    #
    def get_dsource_h_link(self):
        return self._dsource_h_link
    #
    def set_dsource_h_link(self, dsource_h_link):
        self._dsource_h_link = common.check_str(dsource_h_link)
    #
    def del_dsource_h_link(self):
        del self._dsource_h_link
    #
    dsource_h_link = property(get_dsource_h_link, set_dsource_h_link, del_dsource_h_link)
    #
    def get_dsource_desc(self):
        return self._dsource_desc
    #
    def set_dsource_desc(self, dsource_desc):
        self._dsource_desc = common.check_str(dsource_desc)
    #
    def del_dsource_desc(self):
        del self._dsource_desc
    #
    dsource_desc = property(get_dsource_desc, set_dsource_desc, del_dsource_desc)
    #
    def get_status(self):
        return self._status
    #
    def set_status(self, status):
        self._status = common.check_str(status)
    #
    def del_status(self):
        del self._status
    #
    status = property(get_status, set_status, del_status)
    #
    def get_data_origin(self):
        return self._data_origin
    #
    def set_data_origin(self, data_origin):
        self._data_origin = common.check_str(data_origin)
    #
    def del_data_origin(self):
        del self._data_origin
    #
    data_origin = property(get_data_origin, set_data_origin, del_data_origin)
    #
    def get_created_at(self):
        return self._created_at
    #
    def set_created_at(self, created_at):
        self._created_at = common.check_str(created_at)
    #
    def del_created_at(self):
        del self._created_at
    #
    created_at = property(get_created_at, set_created_at, del_created_at)
    #
    def get_updated_at(self):
        return self._updated_at
    #
    def set_updated_at(self, updated_at):
        self._updated_at = common.check_str(updated_at)
    #
    def del_updated_at(self):
        del self._updated_at
    #
    updated_at = property(get_updated_at, set_updated_at, del_updated_at)
    #
    def get_level(self):
        return self._level
    #
    def set_level(self, level):
        self._level = common.check_int(level)
    #
    def del_level(self):
        del self._level
    #
    level = property(get_level, set_level, del_level)
    #
    def get_crs(self):
        return self._crs
    #
    def set_crs(self, crs):
        self._crs = common.check_str(crs)
    #
    def del_crs(self):
        del self._crs
    #
    crs = property(get_crs, set_crs, del_crs)
    #
    def get_offering_status(self):
        return self._offering_status
    #
    def set_offering_status(self, offering_status):
        self._offering_status = common.check_str(offering_status)
    #
    def del_offering_status(self):
        del self._offering_status
    #
    offering_status = property(get_offering_status, set_offering_status, del_offering_status)
    #
    def get_contact_person(self):
        return self._contact_person
    #
    def set_contact_person(self, contact_person):
        self._contact_person = common.check_str(contact_person)
    #
    def del_contact_person(self):
        del self._contact_person
    #
    contact_person = property(get_contact_person, set_contact_person, del_contact_person)
    #
    def get_description_internal(self):
        return self._description_internal
    #
    def set_description_internal(self, description_internal):
        self._description_internal = common.check_str(description_internal)
    #
    def del_description_internal(self):
        del self._description_internal
    #
    description_internal = property(get_description_internal, set_description_internal, del_description_internal)
    #
    def get_description_internal_links(self):
        return self._description_internal_links
    #
    def set_description_internal_links(self, description_internal_links):
        self._description_internal_links = common.check_class(description_internal_links, List[str])
    #
    def del_description_internal_links(self):
        del self._description_internal_links
    #
    description_internal_links = property(get_description_internal_links, set_description_internal_links, del_description_internal_links)
    #
    def get_data_storage_mid_term(self):
        return self._data_storage_mid_term
    #
    def set_data_storage_mid_term(self, data_storage_mid_term):
        self._data_storage_mid_term = common.check_str(data_storage_mid_term)
    #
    def del_data_storage_mid_term(self):
        del self._data_storage_mid_term
    #
    data_storage_mid_term = property(get_data_storage_mid_term, set_data_storage_mid_term, del_data_storage_mid_term)
    #
    def get_data_storage_long_term(self):
        return self._data_storage_long_term
    #
    def set_data_storage_long_term(self, data_storage_long_term):
        self._data_storage_long_term = common.check_str(data_storage_long_term)
    #
    def del_data_storage_long_term(self):
        del self._data_storage_long_term
    #
    data_storage_long_term = property(get_data_storage_long_term, set_data_storage_long_term, del_data_storage_long_term)
    #
    def get_elt_scripts_links(self):
        return self._elt_scripts_links
    #
    def set_elt_scripts_links(self, elt_scripts_links):
        self._elt_scripts_links = common.check_class(elt_scripts_links, List[str])
    #
    def del_elt_scripts_links(self):
        del self._elt_scripts_links
    #
    elt_scripts_links = property(get_elt_scripts_links, set_elt_scripts_links, del_elt_scripts_links)
    #
    def get_license_information(self):
        return self._license_information
    #
    def set_license_information(self, license_information):
        self._license_information = common.check_str(license_information)
    #
    def del_license_information(self):
        del self._license_information
    #
    license_information = property(get_license_information, set_license_information, del_license_information)
    #
    def get_data_set_response(self):
        return self._data_set_response
    #
    def set_data_set_response(self, data_set_response):
        self._data_set_response = common.check_class(data_set_response, DataSetReturn)
    #
    def del_data_set_response(self):
        del self._data_set_response
    #
    data_set_response = property(get_data_set_response, set_data_set_response, del_data_set_response)
#
def from_dict(data_set_dict: Any):
"""
Create a DataSet object from a dictionary.
:param data_set_dict: A dictionary that contains the keys of a DataSet.
:type data_set_dict: Any
:rtype: ibmpairs.catalog.DataSet
:raises Exception: if not a dictionary.
"""
name = None
category = None
max_layers = None
name_alternate = None
rating = None
description_short = None
description_long = None
description_links = None
data_source_name = None
data_source_attribution = None
data_source_description = None
data_source_links = None
update_interval_max = None
update_interval_description = None
lag_horizon = None
lag_horizon_description = None
temporal_resolution = None
temporal_resolution_description = None
spatial_resolution_of_raw_data = None
interpolation = None
dimensions_description = None
permanence = None
permanence_description = None
known_issues = None
responsible_organization = None
properties = None
spatial_coverage = None
latitude_min = None
longitude_min = None
latitude_max = None
longitude_max = None
temporal_min = None
temporal_max = None
id = None
key = None
dsource_h_link = None
dsource_desc = None
status = None
data_origin = None
created_at = None
updated_at = None
level = None
crs = None
offering_status = None
contact_person = None
description_internal = None
description_internal_links = None
data_storage_mid_term = None
data_storage_long_term = None
elt_scripts_links = None
license_information = None
data_set_response = None
common.check_dict(data_set_dict)
if "name" in data_set_dict:
if data_set_dict.get("name") is not None:
name = common.check_str(data_set_dict.get("name"))
if "category" in data_set_dict:
if data_set_dict.get("category") is not None:
category = Category.from_dict(data_set_dict.get("category"))
if "maxLayers" in data_set_dict:
if data_set_dict.get("maxLayers") is not None:
max_layers = common.check_int(data_set_dict.get("maxLayers"))
elif "max_layers" in data_set_dict:
if data_set_dict.get("max_layers") is not None:
max_layers = common.check_int(data_set_dict.get("max_layers"))
if "name_alternate" in data_set_dict:
if data_set_dict.get("name_alternate") is not None:
name_alternate = common.check_str(data_set_dict.get("name_alternate"))
if "rating" in data_set_dict:
if data_set_dict.get("rating") is not None:
rating = common.check_float(data_set_dict.get("rating"))
if "description_short" in data_set_dict:
if data_set_dict.get("description_short") is not None:
description_short = common.check_str(data_set_dict.get("description_short"))
if "description_long" in data_set_dict:
if data_set_dict.get("description_long") is not None:
description_long = common.check_str(data_set_dict.get("description_long"))
if "description_links" in data_set_dict:
if data_set_dict.get("description_links") is not None:
description_links = common.from_list(data_set_dict.get("description_links"), common.check_str)
if "data_source_name" in data_set_dict:
if data_set_dict.get("data_source_name") is not None:
data_source_name = common.check_str(data_set_dict.get("data_source_name"))
if "data_source_attribution" in data_set_dict:
if data_set_dict.get("data_source_attribution") is not None:
data_source_attribution = common.check_str(data_set_dict.get("data_source_attribution"))
if "data_source_description" in data_set_dict:
if data_set_dict.get("data_source_description") is not None:
data_source_description = common.check_str(data_set_dict.get("data_source_description"))
if "data_source_links" in data_set_dict:
if data_set_dict.get("data_source_links") is not None:
data_source_links = common.from_list(data_set_dict.get("data_source_links"), common.check_str)
if "update_interval_max" in data_set_dict:
if data_set_dict.get("update_interval_max") is not None:
update_interval_max = common.check_str(data_set_dict.get("update_interval_max"))
if "update_interval_description" in data_set_dict:
if data_set_dict.get("update_interval_description") is not None:
update_interval_description = common.check_str(data_set_dict.get("update_interval_description"))
if "lag_horizon" in data_set_dict:
if data_set_dict.get("lag_horizon") is not None:
lag_horizon = common.check_str(data_set_dict.get("lag_horizon"))
if "lag_horizon_description" in data_set_dict:
if data_set_dict.get("lag_horizon_description") is not None:
lag_horizon_description = common.check_str(data_set_dict.get("lag_horizon_description"))
if "temporal_resolution" in data_set_dict:
if data_set_dict.get("temporal_resolution") is not None:
temporal_resolution = common.check_str(data_set_dict.get("temporal_resolution"))
if "temporal_resolution_description" in data_set_dict:
if data_set_dict.get("temporal_resolution_description") is not None:
temporal_resolution_description = common.check_str(data_set_dict.get("temporal_resolution_description"))
if "spatial_resolution_of_raw_data" in data_set_dict:
if data_set_dict.get("spatial_resolution_of_raw_data") is not None:
spatial_resolution_of_raw_data = common.check_str(data_set_dict.get("spatial_resolution_of_raw_data"))
if "interpolation" in data_set_dict:
if data_set_dict.get("interpolation") is not None:
interpolation = common.check_str(data_set_dict.get("interpolation"))
if "dimensions_description" in data_set_dict:
if data_set_dict.get("dimensions_description") is not None:
dimensions_description = common.check_str(data_set_dict.get("dimensions_description"))
if "permanence" in data_set_dict:
if data_set_dict.get("permanence") is not None:
permanence = common.check_bool(data_set_dict.get("permanence"))
if "permanence_description" in data_set_dict:
if data_set_dict.get("permanence_description") is not None:
permanence_description = common.check_str(data_set_dict.get("permanence_description"))
if "known_issues" in data_set_dict:
if data_set_dict.get("known_issues") is not None:
known_issues = common.check_str(data_set_dict.get("known_issues"))
if "responsible_organization" in data_set_dict:
if data_set_dict.get("responsible_organization") is not None:
responsible_organization = common.check_str(data_set_dict.get("responsible_organization"))
if "properties" in data_set_dict:
if data_set_dict.get("properties") is not None:
properties = Properties.from_dict(data_set_dict.get("properties"))
if "spatial_coverage" in data_set_dict:
if data_set_dict.get("spatial_coverage") is not None:
spatial_coverage = SpatialCoverage.from_dict(data_set_dict.get("spatial_coverage"))
if "latitude_min" in data_set_dict:
if data_set_dict.get("latitude_min") is not None:
latitude_min = common.check_float(data_set_dict.get("latitude_min"))
if "longitude_min" in data_set_dict:
if data_set_dict.get("longitude_min") is not None:
longitude_min = common.check_float(data_set_dict.get("longitude_min"))
if "latitude_max" in data_set_dict:
if data_set_dict.get("latitude_max") is not None:
latitude_max = common.check_float(data_set_dict.get("latitude_max"))
if "longitude_max" in data_set_dict:
if data_set_dict.get("longitude_max") is not None:
longitude_max = common.check_float(data_set_dict.get("longitude_max"))
if "temporal_min" in data_set_dict:
if data_set_dict.get("temporal_min") is not None:
temporal_min = common.check_str(data_set_dict.get("temporal_min"))
if "temporal_max" in data_set_dict:
if data_set_dict.get("temporal_max") is not None:
temporal_max = common.check_str(data_set_dict.get("temporal_max"))
if "id" in data_set_dict:
if data_set_dict.get("id") is not None:
id = common.check_str(data_set_dict.get("id"))
if "key" in data_set_dict:
if data_set_dict.get("key") is not None:
key = common.check_str(data_set_dict.get("key"))
if "dsourceHLink" in data_set_dict:
if data_set_dict.get("dsourceHLink") is not None:
dsource_h_link = common.check_str(data_set_dict.get("dsourceHLink"))
elif "dsource_h_link" in data_set_dict:
if data_set_dict.get("dsource_h_link") is not None:
dsource_h_link = common.check_str(data_set_dict.get("dsource_h_link"))
if "dsourceDesc" in data_set_dict:
if data_set_dict.get("dsourceDesc") is not None:
dsource_desc = common.check_str(data_set_dict.get("dsourceDesc"))
elif "dsource_desc" in data_set_dict:
if data_set_dict.get("dsource_desc") is not None:
dsource_desc = common.check_str(data_set_dict.get("dsource_desc"))
if "status" in data_set_dict:
if data_set_dict.get("status") is not None:
status = common.check_str(data_set_dict.get("status"))
if "dataOrigin" in data_set_dict:
if data_set_dict.get("dataOrigin") is not None:
data_origin = common.check_str(data_set_dict.get("dataOrigin"))
elif "data_origin" in data_set_dict:
if data_set_dict.get("data_origin") is not None:
data_origin = common.check_str(data_set_dict.get("data_origin"))
if "created_at" in data_set_dict:
if data_set_dict.get("created_at") is not None:
created_at = common.check_str(data_set_dict.get("created_at"))
if "updated_at" in data_set_dict:
if data_set_dict.get("updated_at") is not None:
updated_at = common.check_str(data_set_dict.get("updated_at"))
if "level" in data_set_dict:
if data_set_dict.get("level") is not None:
level = common.check_int(data_set_dict.get("level"))
if "crs" in data_set_dict:
if data_set_dict.get("crs") is not None:
crs = common.check_str(data_set_dict.get("crs"))
if "offering_status" in data_set_dict:
if data_set_dict.get("offering_status") is not None:
offering_status = common.check_str(data_set_dict.get("offering_status"))
if "contact_person" in data_set_dict:
if data_set_dict.get("contact_person") is not None:
contact_person = common.check_str(data_set_dict.get("contact_person"))
if "description_internal" in data_set_dict:
if data_set_dict.get("description_internal") is not None:
description_internal = common.check_str(data_set_dict.get("description_internal"))
if "description_internal_links" in data_set_dict:
if data_set_dict.get("description_internal_links") is not None:
description_internal_links = common.from_list(data_set_dict.get("description_internal_links"), common.check_str)
if "data_storage_mid_term" in data_set_dict:
if data_set_dict.get("data_storage_mid_term") is not None:
data_storage_mid_term = common.check_str(data_set_dict.get("data_storage_mid_term"))
if "data_storage_long_term" in data_set_dict:
if data_set_dict.get("data_storage_long_term") is not None:
data_storage_long_term = common.check_str(data_set_dict.get("data_storage_long_term"))
if "elt_scripts_links" in data_set_dict:
if data_set_dict.get("elt_scripts_links") is not None:
elt_scripts_links = common.from_list(data_set_dict.get("elt_scripts_links"), common.check_str)
if "license_information" in data_set_dict:
if data_set_dict.get("license_information") is not None:
license_information = common.check_str(data_set_dict.get("license_information"))
if "data_set_response" in data_set_dict:
if data_set_dict.get("data_set_response") is not None:
data_set_response = DataSetReturn.from_dict(data_set_dict.get("data_set_response"))
return DataSet(name = name,
category = category,
max_layers = max_layers,
name_alternate = name_alternate,
rating = rating,
description_short = description_short,
description_long = description_long,
description_links = description_links,
data_source_name = data_source_name,
data_source_attribution = data_source_attribution,
data_source_description = data_source_description,
data_source_links = data_source_links,
update_interval_max = update_interval_max,
update_interval_description = update_interval_description,
lag_horizon = lag_horizon,
lag_horizon_description = lag_horizon_description,
temporal_resolution = temporal_resolution,
temporal_resolution_description = temporal_resolution_description,
spatial_resolution_of_raw_data = spatial_resolution_of_raw_data,
interpolation = interpolation,
dimensions_description = dimensions_description,
permanence = permanence,
permanence_description = permanence_description,
known_issues = known_issues,
responsible_organization = responsible_organization,
properties = properties,
spatial_coverage = spatial_coverage,
latitude_min = latitude_min,
longitude_min = longitude_min,
latitude_max = latitude_max,
longitude_max = longitude_max,
temporal_min = temporal_min,
temporal_max = temporal_max,
id = id,
key = key,
dsource_h_link = dsource_h_link,
dsource_desc = dsource_desc,
status = status,
data_origin = data_origin,
created_at = created_at,
updated_at = updated_at,
level = level,
crs = crs,
offering_status = offering_status,
contact_person = contact_person,
description_internal = description_internal,
description_internal_links = description_internal_links,
data_storage_mid_term = data_storage_mid_term,
data_storage_long_term = data_storage_long_term,
elt_scripts_links = elt_scripts_links,
license_information = license_information,
data_set_response = data_set_response
)
#
def to_dict(self):
"""
Create a dictionary from the objects structure.
:rtype: dict
"""
data_set_dict: dict = {}
if self._name is not None:
data_set_dict["name"] = self._name
if self._category is not None:
data_set_dict["category"] = common.class_to_dict(self._category, Category)
if self._max_layers is not None:
data_set_dict["max_layers"] = self._max_layers
if self._name_alternate is not None:
data_set_dict["name_alternate"] = self._name_alternate
if self._rating is not None:
data_set_dict["rating"] = self._rating
if self._description_short is not None:
data_set_dict["description_short"] = self._description_short
if self._description_long is not None:
data_set_dict["description_long"] = self._description_long
if self._description_links is not None:
data_set_dict["description_links"] = common.from_list(self._description_links, common.check_str)
if self._data_source_name is not None:
data_set_dict["data_source_name"] = self._data_source_name
if self._data_source_attribution is not None:
data_set_dict["data_source_attribution"] = self._data_source_attribution
if self._data_source_description is not None:
data_set_dict["data_source_description"] = self._data_source_description
if self._data_source_links is not None:
data_set_dict["data_source_links"] = common.from_list(self._data_source_links, common.check_str)
if self._update_interval_max is not None:
data_set_dict["update_interval_max"] = self._update_interval_max
if self._update_interval_description is not None:
data_set_dict["update_interval_description"] = self._update_interval_description
if self._lag_horizon is not None:
data_set_dict["lag_horizon"] = self._lag_horizon
if self._lag_horizon_description is not None:
data_set_dict["lag_horizon_description"] = self._lag_horizon_description
if self._temporal_resolution is not None:
data_set_dict["temporal_resolution"] = self._temporal_resolution
if self._temporal_resolution_description is not None:
data_set_dict["temporal_resolution_description"] = self._temporal_resolution_description
if self._spatial_resolution_of_raw_data is not None:
data_set_dict["spatial_resolution_of_raw_data"] = self._spatial_resolution_of_raw_data
if self._interpolation is not None:
data_set_dict["interpolation"] = self._interpolation
if self._dimensions_description is not None:
data_set_dict["dimensions_description"] = self._dimensions_description
if self._permanence is not None:
data_set_dict["permanence"] = self._permanence
if self._permanence_description is not None:
data_set_dict["permanence_description"] = self._permanence_description
if self._known_issues is not None:
data_set_dict["known_issues"] = self._known_issues
if self._responsible_organization is not None:
data_set_dict["responsible_organization"] = self._responsible_organization
if self._properties is not None:
data_set_dict["properties"] = common.class_to_dict(self._properties, Properties)
if self._spatial_coverage is not None:
data_set_dict["spatial_coverage"] = common.class_to_dict(self._spatial_coverage, SpatialCoverage)
if self._latitude_min is not None:
data_set_dict["latitude_min"] = self._latitude_min
if self._longitude_min is not None:
data_set_dict["longitude_min"] = self._longitude_min
if self._latitude_max is not None:
data_set_dict["latitude_max"] = self._latitude_max
if self._longitude_max is not None:
data_set_dict["longitude_max"] = self._longitude_max
if self._temporal_min is not None:
data_set_dict["temporal_min"] = self._temporal_min
if self._temporal_max is not None:
data_set_dict["temporal_max"] = self._temporal_max
if self._id is not None:
data_set_dict["id"] = self._id
if self._key is not None:
data_set_dict["key"] = self._key
if self._dsource_h_link is not None:
data_set_dict["dsource_h_link"] = self._dsource_h_link
if self._dsource_desc is not None:
data_set_dict["dsource_desc"] = self._dsource_desc
if self._status is not None:
data_set_dict["status"] = self._status
if self._data_origin is not None:
data_set_dict["data_origin"] = self._data_origin
if self._created_at is not None:
data_set_dict["created_at"] = self._created_at
if self._updated_at is not None:
data_set_dict["updated_at"] = self._updated_at
if self._level is not None:
data_set_dict["level"] = self._level
if self._crs is not None:
data_set_dict["crs"] = self._crs
if self._offering_status is not None:
data_set_dict["offering_status"] = self._offering_status
if self._contact_person is not None:
data_set_dict["contact_person"] = self._contact_person
if self._description_internal is not None:
data_set_dict["description_internal"] = self._description_internal
if self._description_internal_links is not None:
data_set_dict["description_internal_links"] = common.from_list(self._description_internal_links, common.check_str)
if self._data_storage_mid_term is not None:
data_set_dict["data_storage_mid_term"] = self._data_storage_mid_term
if self._data_storage_long_term is not None:
data_set_dict["data_storage_long_term"] = self._data_storage_long_term
if self._elt_scripts_links is not None:
data_set_dict["elt_scripts_links"] = common.from_list(self._elt_scripts_links, common.check_str)
if self._license_information is not None:
data_set_dict["license_information"] = self._license_information
if self._data_set_response is not None:
data_set_dict["data_set_response"] = common.class_to_dict(self._data_set_response, DataSetReturn)
return data_set_dict
#
def to_dict_data_set_post(self):
"""
Create a dictionary from the objects structure ready for a POST operation.
:rtype: dict
"""
data_set_dict: dict = {}
# Common
if self._name is not None:
data_set_dict["name"] = self._name
if self._category is not None:
data_set_dict["category"] = common.class_to_dict(self._category, Category)
if self._max_layers is not None:
data_set_dict["maxLayers"] = self._max_layers
if self._name_alternate is not None:
data_set_dict["name_alternate"] = self._name_alternate
if self._rating is not None:
data_set_dict["rating"] = self._rating
if self._description_short is not None:
data_set_dict["description_short"] = self._description_short
if self._description_long is not None:
data_set_dict["description_long"] = self._description_long
if self._description_links is not None:
data_set_dict["description_links"] = common.from_list(self._description_links, common.check_str)
if self._data_source_name is not None:
data_set_dict["data_source_name"] = self._data_source_name
if self._data_source_attribution is not None:
data_set_dict["data_source_attribution"] = self._data_source_attribution
if self._data_source_description is not None:
data_set_dict["data_source_description"] = self._data_source_description
if self._data_source_links is not None:
data_set_dict["data_source_links"] = common.from_list(self._data_source_links, common.check_str)
if self._update_interval_max is not None:
data_set_dict["update_interval_max"] = self._update_interval_max
if self._update_interval_description is not None:
data_set_dict["update_interval_description"] = self._update_interval_description
if self._lag_horizon is not None:
data_set_dict["lag_horizon"] = self._lag_horizon
if self._lag_horizon_description is not None:
data_set_dict["lag_horizon_description"] = self._lag_horizon_description
if self._temporal_resolution is not None:
data_set_dict["temporal_resolution"] = self._temporal_resolution
if self._temporal_resolution_description is not None:
data_set_dict["temporal_resolution_description"] = self._temporal_resolution_description
if self._spatial_resolution_of_raw_data is not None:
data_set_dict["spatial_resolution_of_raw_data"] = self._spatial_resolution_of_raw_data
if self._interpolation is not None:
data_set_dict["interpolation"] = self._interpolation
if self._dimensions_description is not None:
data_set_dict["dimensions_description"] = self._dimensions_description
if self._permanence is not None:
data_set_dict["permanence"] = self._permanence
if self._permanence_description is not None:
data_set_dict["permanence_description"] = self._permanence_description
if self._known_issues is not None:
data_set_dict["known_issues"] = self._known_issues
if self._responsible_organization is not None:
data_set_dict["responsible_organization"] = self._responsible_organization
if self._properties is not None:
data_set_dict["properties"] = common.class_to_dict(self._properties, Properties)
if self._spatial_coverage is not None:
data_set_dict["spatial_coverage"] = common.class_to_dict(self._spatial_coverage, SpatialCoverage)
if self._latitude_min is not None:
data_set_dict["latitude_min"] = self._latitude_min
if self._longitude_min is not None:
data_set_dict["longitude_min"] = self._longitude_min
if self._latitude_max is not None:
data_set_dict["latitude_max"] = self._latitude_max
if self._longitude_max is not None:
data_set_dict["longitude_max"] = self._longitude_max
if self._temporal_min is not None:
data_set_dict["temporal_min"] = self._temporal_min
if self._temporal_max is not None:
data_set_dict["temporal_max"] = self._temporal_max
# CREATE (POST)
if self._level is not None:
data_set_dict["level"] = self._level
if self._crs is not None:
data_set_dict["crs"] = self._crs
if self._offering_status is not None:
data_set_dict["offering_status"] = self._offering_status
return data_set_dict
#
def to_dict_data_set_put(self):
"""
Create a dictionary from the objects structure ready for a PUT operation.
:rtype: dict
"""
data_set_dict: dict = {}
# Common
if self._name is not None:
data_set_dict["name"] = self._name
if self._category is not None:
data_set_dict["category"] = common.class_to_dict(self._category, Category)
if self._max_layers is not None:
data_set_dict["maxLayers"] = self._max_layers
if self._name_alternate is not None:
data_set_dict["name_alternate"] = self._name_alternate
if self._rating is not None:
data_set_dict["rating"] = self._rating
if self._description_short is not None:
data_set_dict["description_short"] = self._description_short
if self._description_long is not None:
data_set_dict["description_long"] = self._description_long
if self._description_links is not None:
data_set_dict["description_links"] = common.from_list(self._description_links, common.check_str)
if self._data_source_name is not None:
data_set_dict["data_source_name"] = self._data_source_name
if self._data_source_attribution is not None:
data_set_dict["data_source_attribution"] = self._data_source_attribution
if self._data_source_description is not None:
data_set_dict["data_source_description"] = self._data_source_description
if self._data_source_links is not None:
data_set_dict["data_source_links"] = common.from_list(self._data_source_links, common.check_str)
if self._update_interval_max is not None:
data_set_dict["update_interval_max"] = self._update_interval_max
if self._update_interval_description is not None:
data_set_dict["update_interval_description"] = self._update_interval_description
if self._lag_horizon is not None:
data_set_dict["lag_horizon"] = self._lag_horizon
if self._lag_horizon_description is not None:
data_set_dict["lag_horizon_description"] = self._lag_horizon_description
if self._temporal_resolution is not None:
data_set_dict["temporal_resolution"] = self._temporal_resolution
if self._temporal_resolution_description is not None:
data_set_dict["temporal_resolution_description"] = self._temporal_resolution_description
if self._spatial_resolution_of_raw_data is not None:
data_set_dict["spatial_resolution_of_raw_data"] = self._spatial_resolution_of_raw_data
if self._interpolation is not None:
data_set_dict["interpolation"] = self._interpolation
if self._dimensions_description is not None:
data_set_dict["dimensions_description"] = self._dimensions_description
if self._permanence is not None:
data_set_dict["permanence"] = self._permanence
if self._permanence_description is not None:
data_set_dict["permanence_description"] = self._permanence_description
if self._known_issues is not None:
data_set_dict["known_issues"] = self._known_issues
if self._responsible_organization is not None:
data_set_dict["responsible_organization"] = self._responsible_organization
if self._properties is not None:
data_set_dict["properties"] = common.class_to_dict(self._properties, Properties)
if self._spatial_coverage is not None:
data_set_dict["spatial_coverage"] = common.class_to_dict(self._spatial_coverage, SpatialCoverage)
if self._latitude_min is not None:
data_set_dict["latitude_min"] = self._latitude_min
if self._longitude_min is not None:
data_set_dict["longitude_min"] = self._longitude_min
if self._latitude_max is not None:
data_set_dict["latitude_max"] = self._latitude_max
if self._longitude_max is not None:
data_set_dict["longitude_max"] = self._longitude_max
if self._temporal_min is not None:
data_set_dict["temporal_min"] = self._temporal_min
if self._temporal_max is not None:
data_set_dict["temporal_max"] = self._temporal_max
# UPDATE (PUT)
if self._contact_person is not None:
data_set_dict["contact_person"] = self._contact_person
if self._description_internal is not None:
data_set_dict["description_internal"] = self._description_internal
if self._description_internal_links is not None:
data_set_dict["description_internal_links"] = common.from_list(self._description_internal_links, common.check_str)
if self._data_storage_mid_term is not None:
data_set_dict["data_storage_mid_term"] = self._data_storage_mid_term
if self._data_storage_long_term is not None:
data_set_dict["data_storage_long_term"] = self._data_storage_long_term
if self._elt_scripts_links is not None:
data_set_dict["elt_scripts_links"] = common.from_list(self._elt_scripts_links, common.check_str)
if self._license_information is not None:
data_set_dict["license_information"] = self._license_information
return data_set_dict
#
def from_json(data_set_json: Any):
"""
Create a DataSet object from json (dictonary or str).
:param data_set_dict: A json dictionary that contains the keys of a DataSet or a string representation of a json dictionary.
:type data_set_dict: Any
:rtype: ibmpairs.catalog.DataSet
:raises Exception: if not a dictionary or a string.
"""
if isinstance(data_set_json, dict):
data_set = DataSet.from_dict(data_set_json)
elif isinstance(data_set_json, str):
data_set_dict = json.loads(data_set_json)
data_set = DataSet.from_dict(data_set_dict)
else:
msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(data_set_json), "data_set_json")
logger.error(msg)
raise common.PAWException(msg)
return data_set
#
def to_json(self):
"""
Create a string representation of a json dictionary from the objects structure.
:rtype: string
"""
return json.dumps(self.to_dict())
#
def to_json_data_set_post(self):
"""
Create a string representation of a json dictionary from the objects structure ready for a POST operation.
:rtype: string
"""
return json.dumps(self.to_dict_data_set_post())
#
def to_json_data_set_put(self):
"""
Create a string representation of a json dictionary from the objects structure ready for a PUT operation.
:rtype: string
"""
return json.dumps(self.to_dict_data_set_put())
#
def display(self,
columns: List[str] = ['id', 'name', 'description_short', 'description_long']
):
"""
A method to return a pandas.DataFrame object of a get result.
:param columns: The columns to be returned in the pandas.DataFrame object, defaults to ['id', 'name', 'description_short', 'description_long']
:type columns: List[str]
:returns: A pandas.DataFrame of attributes from the object.
:rtype: pandas.DataFrame
"""
display_dict = self.to_dict()
display_df = pd.DataFrame([display_dict], columns=columns)
return display_df
#
    def get(self,
            id = None,
            client: cl.Client = None,
            verify: bool = constants.GLOBAL_SSL_VERIFY
           ):
        """
        A method to get a Data Set.
        :param id: The Data Set ID of the Data Set to be gathered.
        :type id: str
        :param client: An IBM PAIRS Client.
        :type client: ibmpairs.client.Client
        :param verify: SSL verification
        :type verify: bool
        :returns: A populated DataSet object.
        :rtype: ibmpairs.catalog.DataSet
        :raises Exception: A ibmpairs.client.Client is not found,
                           an ID is not provided or already held in the object,
                           a server error occurred,
                           the status of the request is not 200.
        """
        # An explicit id argument overrides and is stored on the object.
        if id is not None:
            self._id = common.check_str(id)
        if self._id is None:
            msg = messages.ERROR_CATALOG_DATA_SET_ID
            logger.error(msg)
            raise common.PAWException(msg)
        # Resolve the client to use: the explicit argument, then the global
        # client, then the client already held by this object.
        cli = common.set_client(input_client = client,
                                global_client = cl.GLOBAL_PAIRS_CLIENT,
                                self_client = self._client)
        try:
            # GET <host><data sets API><id>
            response = cli.get(url = cli.get_host() +
                                     constants.CATALOG_DATA_SETS_API +
                                     common.check_str(self._id),
                               verify = verify
                              )
        except Exception as e:
            msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_SETS_API + common.check_str(self._id), e)
            logger.error(msg)
            raise common.PAWException(msg)
        if response.status_code != 200:
            # NOTE(review): unlike create/update/delete, no attempt is made to
            # extract a server-provided error message here.
            error_message = 'failed'
            msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_SETS_API + common.check_str(self._id), response.status_code, error_message)
            logger.error(msg)
            raise common.PAWException(msg)
        else:
            # Returns a new DataSet built from the response body; the current
            # object is not repopulated (only self._id may have been set above).
            data_set_get = DataSet.from_dict(response.json())
            return data_set_get
#
def create(self,
client: cl.Client = None,
verify: bool = constants.GLOBAL_SSL_VERIFY
):
"""
A method to create a Data Set.
:param client: An IBM PAIRS Client.
:type client: ibmpairs.client.Client
:param verify: SSL verification
:type verify: bool
:raises Exception: A ibmpairs.client.Client is not found,
a server error occurred,
the status of the request is not 200.
"""
cli = common.set_client(input_client = client,
global_client = cl.GLOBAL_PAIRS_CLIENT,
self_client = self._client)
dataset_json = self.to_json_data_set_post()
try:
response = cli.post(url = cli.get_host() +
constants.CATALOG_DATA_SETS_API,
headers = constants.CLIENT_PUT_AND_POST_HEADER,
body = dataset_json,
verify = verify
)
except Exception as e:
msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('POST', 'request', cli.get_host() + constants.CATALOG_DATA_SETS_API, e)
logger.error(msg)
raise common.PAWException(msg)
if response.status_code != 200:
error_message = 'failed'
if response.json is not None:
try:
self._data_set_response = data_set_return_from_dict(response.json())
error_message = self._data_set_response.message
except:
msg = messages.INFO_CATALOG_RESPOSE_NOT_SUCCESSFUL_NO_ERROR_MESSAGE
logger.info(msg)
msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('POST', 'request', cli.get_host() + constants.CATALOG_DATA_SETS_API, response.status_code, error_message)
logger.error(msg)
raise common.PAWException(msg)
else:
self._data_set_response = data_set_return_from_dict(response.json())
self._id = self._data_set_response.data_set_id
msg = messages.INFO_CATALOG_DATA_SET_CREATE_SUCCESS.format(str(self._data_set_response.data_set_id))
logger.info(msg)
#
def update(self,
id = None,
client: cl.Client = None,
verify: bool = constants.GLOBAL_SSL_VERIFY
):
"""
A method to update a Data Set.
:param id: The Data Set ID of the Data Set to be updated.
:type id: str
:param client: An IBM PAIRS Client.
:type client: ibmpairs.client.Client
:param verify: SSL verification
:type verify: bool
:raises Exception: A ibmpairs.client.Client is not found,
an ID is not provided or already held in the object,
a server error occurred,
the status of the request is not 200.
"""
if id is not None:
self._id = common.check_str(id)
if self._id is None:
msg = messages.ERROR_CATALOG_DATA_SET_ID
logger.error(msg)
raise common.PAWException(msg)
cli = common.set_client(input_client = client,
global_client = cl.GLOBAL_PAIRS_CLIENT,
self_client = self._client)
dataset_json = self.to_json_data_set_put()
try:
response = cli.put(url = cli.get_host() +
constants.CATALOG_DATA_SETS_API +
common.check_str(self._id),
headers = constants.CLIENT_PUT_AND_POST_HEADER,
body = dataset_json,
verify = verify
)
except Exception as e:
msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('PUT', 'request', cli.get_host() + constants.CATALOG_DATA_SETS_API + common.check_str(self._id), e)
logger.error(msg)
raise common.PAWException(msg)
if response.status_code != 200:
error_message = 'failed'
if response.json is not None:
try:
self._data_set_response = data_set_return_from_dict(response.json())
error_message = self._data_set_response.message
except:
msg = messages.INFO_CATALOG_RESPOSE_NOT_SUCCESSFUL_NO_ERROR_MESSAGE
logger.info(msg)
msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('PUT', 'request', cli.get_host() + constants.CATALOG_DATA_SETS_API + common.check_str(self._id), response.status_code, error_message)
logger.error(msg)
raise common.PAWException(msg)
else:
self._data_set_response = data_set_return_from_dict(response.json())
msg = messages.INFO_CATALOG_DATA_SET_UPDATE_SUCCESS.format(str(self._data_set_response.data_set_id))
logger.info(msg)
# To ensure a user wishes to delete, the data set id must be specified- this will not be pulled from the object.
def delete(self,
id,
hard_delete: bool = False,
client: cl.Client = None,
verify: bool = constants.GLOBAL_SSL_VERIFY
):
"""
A method to delete a Data Set.
:param id: The Data Set ID of the Data Set to be deleted.
:type id: str
:param hard_delete: Whether the Data Set should be 'hard deleted', NOTE: this also deletes all data held by associated Data Layers. This step is necessary where the intention is to delete and recreate a Data Set with the same name.
:type hard_delete: bool
:param client: An IBM PAIRS Client.
:type client: ibmpairs.client.Client
:param verify: SSL verification
:type verify: bool
:raises Exception: A ibmpairs.client.Client is not found,
an ID is not provided or already held in the object,
a server error occurred,
the status of the request is not 200.
"""
cli = common.set_client(input_client = client,
global_client = cl.GLOBAL_PAIRS_CLIENT,
self_client = self._client)
if hard_delete is True:
url = cli.get_host() + constants.CATALOG_DATA_SETS_API + common.check_str(id) + "?hard_delete=true&force=true"
else:
url = cli.get_host() + constants.CATALOG_DATA_SETS_API + common.check_str(id)
try:
response = response = cli.delete(url = url,
verify = verify)
except Exception as e:
msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('DELETE', 'request', url, e)
logger.error(msg)
raise common.PAWException(msg)
if response.status_code != 200:
error_message = 'failed'
if response.json() is not None:
try:
self._data_set_response = data_set_return_from_dict(response.json())
error_message = self._data_set_response.message
except:
msg = messages.INFO_CATALOG_RESPOSE_NOT_SUCCESSFUL_NO_ERROR_MESSAGE
logger.info(msg)
msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('DELETE', 'request', url, response.status_code, error_message)
logger.error(msg)
raise common.PAWException(msg)
else:
self._data_set_response = data_set_return_from_dict(response.json())
msg = messages.INFO_CATALOG_DATA_SET_DELETE_SUCCESS.format(str(self._data_set_response.data_set_id))
logger.info(msg)
#
class DataSets:
    #
    #_client: cl.Client
    # Common
    #_data_sets: List[DataSet]
    """
    An object to represent a list of IBM PAIRS Data Sets.
    :param client: An IBM PAIRS Client.
    :type client: ibmpairs.client.Client
    :param data_sets: A list of Data Sets.
    :type data_sets: List[ibmpairs.catalog.DataSet]
    :raises Exception: An ibmpairs.client.Client is not found.
    """
    #
    def __str__(self):
        """
        The method creates a string representation of the internal class structure.
        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __repr__(self):
        """
        The method creates a string representation (from a dict) of the internal class structure.
        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __getitem__(self, data_set_name):
        """
        A method to overload the default behaviour of the slice on this object to be an
        element from the data_sets attribute.
        :param data_set_name: The name of a Data Set to search for, if this is numeric,
                              the method simply returns the default (list order).
        :type data_set_name: str
        :raises Exception: If less than one value is found,
                           if more than one value is found.
        """
        if isinstance(data_set_name, int):
            # Numeric access falls back to plain list indexing.
            return self._data_sets[data_set_name]
        elif isinstance(data_set_name, str):
            index_list = []
            index = 0
            foundCount = 0
            for data_set in self._data_sets:
                if data_set.name is not None:
                    if (data_set.name == data_set_name):
                        foundCount = foundCount + 1
                        index_list.append(index)
                else:
                    msg = messages.WARN_CATALOG_DATA_SETS_DATA_SET_OBJECT_NO_NAME.format(data_set_name)
                    logger.warning(msg)
                index = index + 1
            # Exactly one match is required; zero or multiple matches raise.
            if foundCount == 0:
                msg = messages.ERROR_CATALOG_DATA_SETS_NO_DATA_SET.format(data_set_name)
                logger.error(msg)
                raise common.PAWException(msg)
            elif foundCount == 1:
                return self._data_sets[index_list[0]]
            else:
                msg = messages.ERROR_CATALOG_DATA_SETS_MULTIPLE_IDENTICAL_NAMES.format(data_set_name)
                logger.error(msg)
                raise common.PAWException(msg)
        else:
            msg = messages.ERROR_CATALOG_DATA_SETS_TYPE_UNKNOWN.format(type(data_set_name))
            logger.error(msg)
            raise common.PAWException(msg)
    #
    def __init__(self,
                 client: cl.Client = None,
                 data_sets: List[DataSet] = None
                ):
        self._client = common.set_client(input_client = client,
                                         global_client = cl.GLOBAL_PAIRS_CLIENT)
        self._data_sets = data_sets
    #
    def get_client(self):
        return self._client
    #
    def set_client(self, c):
        self._client = common.check_class(c, cl.Client)
    #
    def del_client(self):
        del self._client
    #
    client = property(get_client, set_client, del_client)
    #
    def get_data_sets(self):
        return self._data_sets
    #
    def set_data_sets(self, data_sets):
        self._data_sets = common.check_class(data_sets, List[DataSet])
    #
    def del_data_sets(self):
        del self._data_sets
    #
    data_sets = property(get_data_sets, set_data_sets, del_data_sets)
    #
    def from_dict(data_sets_input: Any):
        """
        Create a DataSets object from a dictionary.
        :param data_sets_input: A dictionary that contains the keys of a DataSets.
        :type data_sets_input: Any
        :rtype: ibmpairs.catalog.DataSets
        :raises Exception: If not a dictionary or a list.
        """
        data_sets = None
        if isinstance(data_sets_input, dict):
            common.check_dict(data_sets_input)
            if "data_sets" in data_sets_input:
                if data_sets_input.get("data_sets") is not None:
                    data_sets = common.from_list(data_sets_input.get("data_sets"), DataSet.from_dict)
        elif isinstance(data_sets_input, list):
            # A bare list is treated as the data_sets attribute directly.
            data_sets = common.from_list(data_sets_input, DataSet.from_dict)
        else:
            msg = messages.ERROR_CATALOG_DATA_SETS_UNKNOWN.format(type(data_sets_input))
            logger.error(msg)
            raise common.PAWException(msg)
        return DataSets(data_sets = data_sets)
    #
    def to_dict(self):
        """
        Create a dictionary from the objects structure.
        :rtype: dict
        """
        data_sets_dict: dict = {}
        if self._data_sets is not None:
            data_sets_dict["data_sets"] = common.from_list(self._data_sets, lambda item: common.class_to_dict(item, DataSet))
        return data_sets_dict
    #
    def from_json(data_sets_json: Any):
        """
        Create a DataSets object from json (dictonary or str).
        :param data_sets_json: A json dictionary that contains the keys of a DataSets or a string representation of a json dictionary.
        :type data_sets_json: Any
        :rtype: ibmpairs.catalog.DataSets
        :raises Exception: if not a dictionary or a string.
        """
        if isinstance(data_sets_json, dict):
            data_sets = DataSets.from_dict(data_sets_json)
        elif isinstance(data_sets_json, str):
            data_sets_dict = json.loads(data_sets_json)
            data_sets = DataSets.from_dict(data_sets_dict)
        else:
            msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(data_sets_json), "data_sets_json")
            logger.error(msg)
            raise common.PAWException(msg)
        return data_sets
    #
    def to_json(self):
        """
        Create a string representation of a json dictionary from the objects structure.
        :rtype: string
        """
        return json.dumps(self.to_dict())
    #
    def display(self,
                columns: List[str] = ['id', 'name', 'description_short', 'description_long'],
                sort_by: str = 'id'
               ):
        """
        A method to return a pandas.DataFrame object of get results.
        :param columns: The columns to be returned in the pandas.DataFrame object, defaults to ['id', 'name', 'description_short', 'description_long']
        :type columns: List[str]
        :param sort_by: A column to sort the result by, defaults to 'id'.
        :type sort_by: str
        :returns: A pandas.DataFrame of attributes from the data_sets attribute.
        :rtype: pandas.DataFrame
        """
        display_df = None
        for data_set in self._data_sets:
            next_display = data_set.display(columns)
            if display_df is None:
                display_df = next_display
            else:
                display_df = pd.concat([display_df, next_display])
        display_df.reset_index(inplace=True, drop=True)
        # Fix: sort_values returns a new DataFrame; the sorted result was
        # previously discarded, so sort_by had no effect.
        display_df = display_df.sort_values(by=[sort_by])
        return display_df
    #
    def get(self,
            client: cl.Client = None,
            verify: bool = constants.GLOBAL_SSL_VERIFY
           ):
        """
        A method to get all of Data Sets a user has access to.
        :param client: An IBM PAIRS Client.
        :type client: ibmpairs.client.Client
        :param verify: SSL verification
        :type verify: bool
        :returns: A populated DataSets object.
        :rtype: ibmpairs.catalog.DataSets
        :raises Exception: A ibmpairs.client.Client is not found,
                           a server error occurred,
                           the status of the request is not 200.
        """
        # Resolve the client to use: the explicit argument, then the global
        # client, then the client already held by this object.
        cli = common.set_client(input_client = client,
                                global_client = cl.GLOBAL_PAIRS_CLIENT,
                                self_client = self._client)
        try:
            response = cli.get(url = cli.get_host() +
                                     constants.CATALOG_DATA_SETS_API_FULL,
                               verify = verify
                              )
        except Exception as e:
            msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_SETS_API_FULL, e)
            logger.error(msg)
            raise common.PAWException(msg)
        if response.status_code != 200:
            error_message = 'failed'
            msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_SETS_API_FULL, response.status_code, error_message)
            logger.error(msg)
            raise common.PAWException(msg)
        else:
            # Success: repopulate this object and also return the new result.
            data_sets_get = DataSets.from_dict(response.json())
            self._data_sets = data_sets_get.data_sets
            return data_sets_get
#
class ColorTable:
    #_id: str
    #_name: str
    #_colors: str
    """
    An object to represent a catalog color table.
    :param id: An ID of a color table.
    :type id: str
    :param name: A name for the color table.
    :type name: str
    :param colors: A string list of colors.
    :type colors: str
    """
    #
    def __str__(self):
        """
        The method creates a string representation of the internal class structure.
        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __repr__(self):
        """
        The method creates a string representation (from a dict) of the internal class structure.
        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __init__(self,
                 id: str = None,
                 name: str = None,
                 colors: str = None
                ):
        self._id = id
        self._name = name
        self._colors = colors
    #
    def get_id(self):
        return self._id
    #
    def set_id(self, id):
        self._id = common.check_str(id)
    #
    def del_id(self):
        del self._id
    #
    id = property(get_id, set_id, del_id)
    #
    def get_name(self):
        return self._name
    #
    def set_name(self, name):
        self._name = common.check_str(name)
    #
    def del_name(self):
        del self._name
    #
    name = property(get_name, set_name, del_name)
    #
    def get_colors(self):
        return self._colors
    #
    def set_colors(self, colors):
        self._colors = common.check_str(colors)
    #
    def del_colors(self):
        del self._colors
    #
    colors = property(get_colors, set_colors, del_colors)
    #
    def from_dict(color_table_dict: Any):
        """
        Create a ColorTable object from a dictionary.
        :param color_table_dict: A dictionary that contains the keys of a ColorTable.
        :type color_table_dict: Any
        :rtype: ibmpairs.catalog.ColorTable
        :raises Exception: If not a dictionary.
        """
        common.check_dict(color_table_dict)
        # Collect the recognized keys, coercing each present value to str.
        values = {}
        for field in ("id", "name", "colors"):
            if color_table_dict.get(field) is not None:
                values[field] = common.check_str(color_table_dict.get(field))
        return ColorTable(id = values.get("id"),
                          name = values.get("name"),
                          colors = values.get("colors")
                         )
    #
    def to_dict(self):
        """
        Create a dictionary from the objects structure.
        :rtype: dict
        """
        result: dict = {}
        # Only populated attributes appear in the serialized form.
        for field, value in (("id", self._id),
                             ("name", self._name),
                             ("colors", self._colors)):
            if value is not None:
                result[field] = value
        return result
    #
    def from_json(color_table_json: Any):
        """
        Create a ColorTable object from json (dictonary or str).
        :param color_table_json: A json dictionary that contains the keys of a ColorTable or a string representation of a json dictionary.
        :type color_table_json: Any
        :rtype: ibmpairs.catalog.ColorTable
        :raises Exception: If not a dictionary or a string.
        """
        # A dict is consumed directly; a str is parsed to a dict first.
        if isinstance(color_table_json, dict):
            return ColorTable.from_dict(color_table_json)
        if isinstance(color_table_json, str):
            return ColorTable.from_dict(json.loads(color_table_json))
        msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(color_table_json), "color_table_json")
        logger.error(msg)
        raise common.PAWException(msg)
    #
    def to_json(self):
        """
        Create a string representation of a json dictionary from the objects structure.
        :rtype: string
        """
        return json.dumps(self.to_dict())
#
class DataLayerReturn:
    #_data_layer_ids: List[str]
    #_status: int
    #_message: str
    #_id: str
    """
    An object to represent the response from a DataLayer object call.
    :param data_layer_ids: A list of Data Layer IDs.
    :type data_layer_ids: List[str]
    :param status: A status code.
    :type status: int
    :param message: A status message from the call.
    :type message: str
    :param id: A Data Layer ID.
    :type id: str
    """
    #
    def __str__(self):
        """
        The method creates a string representation of the internal class structure.
        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __repr__(self):
        """
        The method creates a dict representation of the internal class structure.
        :returns: A dict representation of the internal class structure.
        :rtype: dict
        """
        # NOTE(review): despite the docstring this returns a JSON *string*
        # (identical output to __str__) — consistent with the file-wide pattern.
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __init__(self,
                 data_layer_ids: List[str] = None,
                 status: int = None,
                 message: str = None,
                 id: str = None
                ):
        # Plain attribute assignment; validation happens only in the property
        # setters below, not at construction time.
        self._data_layer_ids = data_layer_ids
        self._status = status
        self._message = message
        self._id = id
    #
    def get_data_layer_ids(self):
        # Getter for the data_layer_ids attribute.
        return self._data_layer_ids
    #
    def set_data_layer_ids(self, data_layer_ids):
        # NOTE(review): common.check_str is tried before the List[str] branch,
        # so a single string id is accepted first. This assumes check_str
        # returns a falsy value (rather than raising) for non-string input —
        # TODO confirm against common.check_str.
        if common.check_str(data_layer_ids):
            self._data_layer_ids = data_layer_ids
        elif common.check_class(data_layer_ids, List[str]):
            self._data_layer_ids = data_layer_ids
        else:
            msg = messages.ERROR_CATALOG_SET_DATA_LAYER_ID
            logger.error(msg)
            raise common.PAWException(msg)
    #
    def del_data_layer_ids(self):
        # Deleter for the data_layer_ids attribute.
        del self._data_layer_ids
    #
    data_layer_ids = property(get_data_layer_ids, set_data_layer_ids, del_data_layer_ids)
    #
    def get_status(self):
        # Getter for the status attribute.
        return self._status
    #
    def set_status(self, status):
        # Setter: status is validated as an int.
        self._status = common.check_int(status)
    #
    def del_status(self):
        # Deleter for the status attribute.
        del self._status
    #
    status = property(get_status, set_status, del_status)
    #
    def get_message(self):
        # Getter for the message attribute.
        return self._message
    #
    def set_message(self, message):
        # Setter: message is validated as a string.
        self._message = common.check_str(message)
    #
    def del_message(self):
        # Deleter for the message attribute.
        del self._message
    #
    message = property(get_message, set_message, del_message)
    #
    def get_id(self):
        # Getter for the id attribute.
        return self._id
    #
    def set_id(self, id):
        # Setter: id is validated as a string.
        self._id = common.check_str(id)
    #
    def del_id(self):
        # Deleter for the id attribute.
        del self._id
    #
    id = property(get_id, set_id, del_id)
    #
    def from_dict(data_layer_return_dict: Any):
        """
        Create a DataLayerReturn object from a dictionary.
        :param data_layer_return_dict: A dictionary that contains the keys of a DataLayerReturn.
        :type data_layer_return_dict: Any
        :rtype: ibmpairs.catalog.DataLayerReturn
        :raises Exception: If not a dictionary.
        """
        data_layer_ids = None
        status = None
        message = None
        id = None
        common.check_dict(data_layer_return_dict)
        # Accept both the camelCase key returned by the API ("datalayerIds")
        # and the snake_case key produced by to_dict() round-trips.
        if "datalayerIds" in data_layer_return_dict:
            if data_layer_return_dict.get("datalayerIds") is not None:
                if isinstance(data_layer_return_dict.get("datalayerIds"), list):
                    data_layer_ids = common.from_list(data_layer_return_dict.get("datalayerIds"), common.check_str)
                # A scalar (int) id is normalised to a single string value.
                elif isinstance(data_layer_return_dict.get("datalayerIds"), int):
                    data_layer_ids = common.check_str(data_layer_return_dict.get("datalayerIds"))
        elif "data_layer_ids" in data_layer_return_dict:
            if data_layer_return_dict.get("data_layer_ids") is not None:
                if isinstance(data_layer_return_dict.get("data_layer_ids"), list):
                    data_layer_ids = common.from_list(data_layer_return_dict.get("data_layer_ids"), common.check_str)
                elif isinstance(data_layer_return_dict.get("data_layer_ids"), int):
                    data_layer_ids = common.check_str(data_layer_return_dict.get("data_layer_ids"))
        if "status" in data_layer_return_dict:
            if data_layer_return_dict.get("status") is not None:
                status = common.check_int(data_layer_return_dict.get("status"))
        if "message" in data_layer_return_dict:
            if data_layer_return_dict.get("message") is not None:
                message = common.check_str(data_layer_return_dict.get("message"))
        if "id" in data_layer_return_dict:
            if data_layer_return_dict.get("id") is not None:
                id = common.check_str(data_layer_return_dict.get("id"))
        return DataLayerReturn(data_layer_ids = data_layer_ids,
                               status = status,
                               message = message,
                               id = id
                              )
    #
    def to_dict(self):
        """
        Create a dictionary from the objects structure.
        :rtype: dict
        """
        # Only populated attributes are emitted, always under snake_case keys.
        data_layer_return_dict: dict = {}
        if self._data_layer_ids is not None:
            data_layer_return_dict["data_layer_ids"] = self._data_layer_ids
        if self._status is not None:
            data_layer_return_dict["status"] = self._status
        if self._message is not None:
            data_layer_return_dict["message"] = self._message
        if self._id is not None:
            data_layer_return_dict["id"] = self._id
        return data_layer_return_dict
    #
    def from_json(data_layer_return_json: Any):
        """
        Create a DataLayerReturn object from json (dictonary or str).
        :param data_layer_return_dict: A json dictionary that contains the keys of a DataLayerReturn or a string representation of a json dictionary.
        :type data_layer_return_dict: Any
        :rtype: ibmpairs.catalog.DataLayerReturn
        :raises Exception: If not a dictionary or a string.
        """
        if isinstance(data_layer_return_json, dict):
            data_layer_return = DataLayerReturn.from_dict(data_layer_return_json)
        elif isinstance(data_layer_return_json, str):
            data_layer_return_dict = json.loads(data_layer_return_json)
            data_layer_return = DataLayerReturn.from_dict(data_layer_return_dict)
        else:
            msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(data_layer_return_json), "data_layer_return_json")
            logger.error(msg)
            raise common.PAWException(msg)
        return data_layer_return
    #
    def to_json(self):
        """
        Create a string representation of a json dictionary from the objects structure.
        :rtype: string
        """
        return json.dumps(self.to_dict())
#
class DataLayerDimensionReturn:
    #_data_layer_dimension_id: str
    #_status: int
    #_message: str
    """
    An object to represent the response from a DataLayerDimension object call.
    :param data_layer_dimension_id: A Data Layer Dimension ID.
    :type data_layer_dimension_id: str
    :param status: A status code.
    :type status: int
    :param message: A status message from the call.
    :type message: str
    """
    #
    def __str__(self):
        """
        The method creates a string representation of the internal class structure.
        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __repr__(self):
        """
        The method creates a dict representation of the internal class structure.
        :returns: A dict representation of the internal class structure.
        :rtype: dict
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __init__(self,
                 data_layer_dimension_id: str = None,
                 status: int = None,
                 message: str = None
                ):
        # Plain attribute assignment; validation happens only via the
        # property setters below.
        self._data_layer_dimension_id = data_layer_dimension_id
        self._status = status
        self._message = message
    #
    def get_data_layer_dimension_id(self):
        # Getter for the data_layer_dimension_id attribute.
        return self._data_layer_dimension_id
    #
    def set_data_layer_dimension_id(self, data_layer_dimension_id):
        # Setter: the id is validated as a string.
        self._data_layer_dimension_id = common.check_str(data_layer_dimension_id)
    #
    def del_data_layer_dimension_id(self):
        # Deleter for the data_layer_dimension_id attribute.
        del self._data_layer_dimension_id
    #
    data_layer_dimension_id = property(get_data_layer_dimension_id, set_data_layer_dimension_id, del_data_layer_dimension_id)
    #
    def get_status(self):
        # Getter for the status attribute.
        return self._status
    #
    def set_status(self, status):
        # Setter: status is validated as an int.
        self._status = common.check_int(status)
    #
    def del_status(self):
        # Deleter for the status attribute.
        del self._status
    #
    status = property(get_status, set_status, del_status)
    #
    def get_message(self):
        # Getter for the message attribute.
        return self._message
    #
    def set_message(self, message):
        # Setter: message is validated as a string.
        self._message = common.check_str(message)
    #
    def del_message(self):
        # Deleter for the message attribute.
        del self._message
    #
    message = property(get_message, set_message, del_message)
    #
    def from_dict(data_layer_dimension_return_dict: Any):
        """
        Create a DataLayerDimensionReturn object from a dictionary.
        :param data_layer_dimension_return_dict: A dictionary that contains the keys of a DataLayerDimensionReturn.
        :type data_layer_dimension_return_dict: Any
        :rtype: ibmpairs.catalog.DataLayerDimensionReturn
        :raises Exception: If not a dictionary.
        """
        # Bug fix: this local was previously misnamed 'data_layer_property_id',
        # which left 'data_layer_dimension_id' unbound (NameError at the
        # constructor call) whenever the input contained neither key variant.
        data_layer_dimension_id = None
        status = None
        message = None
        common.check_dict(data_layer_dimension_return_dict)
        # Accept both the camelCase key returned by the API and the snake_case
        # key produced by to_dict() round-trips.
        if "datalayerDimensionId" in data_layer_dimension_return_dict:
            if data_layer_dimension_return_dict.get("datalayerDimensionId") is not None:
                data_layer_dimension_id = common.check_str(data_layer_dimension_return_dict.get("datalayerDimensionId"))
        elif "data_layer_dimension_id" in data_layer_dimension_return_dict:
            if data_layer_dimension_return_dict.get("data_layer_dimension_id") is not None:
                data_layer_dimension_id = common.check_str(data_layer_dimension_return_dict.get("data_layer_dimension_id"))
        if "status" in data_layer_dimension_return_dict:
            if data_layer_dimension_return_dict.get("status") is not None:
                status = common.check_int(data_layer_dimension_return_dict.get("status"))
        if "message" in data_layer_dimension_return_dict:
            if data_layer_dimension_return_dict.get("message") is not None:
                message = common.check_str(data_layer_dimension_return_dict.get("message"))
        return DataLayerDimensionReturn(data_layer_dimension_id = data_layer_dimension_id,
                                        status = status,
                                        message = message
                                       )
    #
    def to_dict(self):
        """
        Create a dictionary from the objects structure.
        :rtype: dict
        """
        # Only populated attributes are emitted, always under snake_case keys.
        data_layer_dimension_return_dict: dict = {}
        if self._data_layer_dimension_id is not None:
            data_layer_dimension_return_dict["data_layer_dimension_id"] = self._data_layer_dimension_id
        if self._status is not None:
            data_layer_dimension_return_dict["status"] = self._status
        if self._message is not None:
            data_layer_dimension_return_dict["message"] = self._message
        return data_layer_dimension_return_dict
    #
    def from_json(data_layer_dimension_return_json: Any):
        """
        Create a DataLayerDimensionReturn object from json (dictonary or str).
        :param data_layer_dimension_return_json: A json dictionary that contains the keys of a DataLayerDimensionReturn or a string representation of a json dictionary.
        :type data_layer_dimension_return_json: Any
        :rtype: ibmpairs.catalog.DataLayerDimensionReturn
        :raises Exception: If not a dictionary or a string.
        """
        if isinstance(data_layer_dimension_return_json, dict):
            data_layer_dimension_return = DataLayerDimensionReturn.from_dict(data_layer_dimension_return_json)
        elif isinstance(data_layer_dimension_return_json, str):
            data_layer_dimension_return_dict = json.loads(data_layer_dimension_return_json)
            data_layer_dimension_return = DataLayerDimensionReturn.from_dict(data_layer_dimension_return_dict)
        else:
            msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(data_layer_dimension_return_json), "data_layer_dimension_return_json")
            logger.error(msg)
            raise common.PAWException(msg)
        return data_layer_dimension_return
    #
    def to_json(self):
        """
        Create a string representation of a json dictionary from the objects structure.
        :rtype: string
        """
        return json.dumps(self.to_dict())
#
class DataLayerDimension:
    #_client: cl.Client
    #_data_layer_id: str
    # Common
    #_full_name: str
    #_short_name: str
    #_type: str
    #_unit: str
    # GET Exclusive
    # (GET /v2/datalayers/{datalayer_id}/datalayer_dimensions)
    #_id: str
    #_order: int
    #_identifier: str
    # Internal
    #_data_layer_dimension_response: DataLayerDimensionReturn
    """
    An object to represent an IBM PAIRS Data Layer Dimension.
    :param client: An IBM PAIRS Client.
    :type client: ibmpairs.client.Client
    :param data_layer_id: A Data Layer ID.
    :type data_layer_id: str
    :param id: The ID number of the Data Layer Dimension.
    :type id: str
    :param order: The order number.
    :type order: int
    :param full_name: Full name of the Data Layer Dimension.
    :type full_name: str
    :param short_name: Short name of the Data Layer Dimension.
    :type short_name: str
    :param type: Type of the Data Layer Dimension.
    :type type: str
    :param identifier: The identifier.
    :type identifier: str
    :param unit: Unit of the Data Layer Dimension.
    :type unit: str
    :param data_layer_dimension_response: A response object from a DataLayerDimension method call.
    :type data_layer_dimension_response: ibmpairs.catalog.DataLayerDimensionReturn
    :raises Exception: An ibmpairs.client.Client is not found.
    """
    #
    def __str__(self):
        """
        The method creates a string representation of the internal class structure.
        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __repr__(self):
        """
        The method creates a dict representation of the internal class structure.
        :returns: A dict representation of the internal class structure.
        :rtype: dict
        """
        # NOTE(review): returns a JSON string (same as __str__), matching the
        # file-wide pattern despite the docstring wording.
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __init__(self,
                 client: cl.Client = None,
                 data_layer_id: str = None,
                 id: str = None,
                 order: int = None,
                 full_name: str = None,
                 short_name: str = None,
                 type: str = None,
                 identifier: str = None,
                 unit: str = None,
                 data_layer_dimension_response: DataLayerDimensionReturn = None
                ):
        # Fall back to the module-wide global client when none is supplied.
        self._client = common.set_client(input_client = client,
                                         global_client = cl.GLOBAL_PAIRS_CLIENT)
        self._data_layer_id = data_layer_id
        self._id = id
        self._order = order
        self._full_name = full_name
        self._short_name = short_name
        self._type = type
        self._identifier = identifier
        self._unit = unit
        # Always hold a response object so later method calls can populate it.
        if data_layer_dimension_response is None:
            self._data_layer_dimension_response = DataLayerDimensionReturn()
        else:
            self._data_layer_dimension_response = data_layer_dimension_response
    #
    def get_client(self):
        # Getter for the client attribute.
        return self._client
    #
    def set_client(self, c):
        # Setter: the value must be an ibmpairs.client.Client.
        self._client = common.check_class(c, cl.Client)
    #
    def del_client(self):
        # Deleter for the client attribute.
        del self._client
    #
    client = property(get_client, set_client, del_client)
    #
    def get_data_layer_id(self):
        # Getter for the data_layer_id attribute.
        return self._data_layer_id
    #
    def set_data_layer_id(self, data_layer_id):
        # Setter: data_layer_id is validated as a string.
        self._data_layer_id = common.check_str(data_layer_id)
    #
    def del_data_layer_id(self):
        # Deleter for the data_layer_id attribute.
        del self._data_layer_id
    #
    data_layer_id = property(get_data_layer_id, set_data_layer_id, del_data_layer_id)
    #
    def get_id(self):
        # Getter for the id attribute.
        return self._id
    #
    def set_id(self, id):
        # Setter: id is validated as a string.
        self._id = common.check_str(id)
    #
    def del_id(self):
        # Deleter for the id attribute.
        del self._id
    #
    id = property(get_id, set_id, del_id)
    #
    def get_order(self):
        # Getter for the order attribute.
        return self._order
    #
    def set_order(self, order):
        # Setter: order is validated as an int.
        self._order = common.check_int(order)
    #
    def del_order(self):
        # Deleter for the order attribute.
        del self._order
    #
    order = property(get_order, set_order, del_order)
    #
    def get_full_name(self):
        # Getter for the full_name attribute.
        return self._full_name
    #
    def set_full_name(self, full_name):
        # Setter: full_name is validated as a string.
        self._full_name = common.check_str(full_name)
    #
    def del_full_name(self):
        # Deleter for the full_name attribute.
        del self._full_name
    #
    full_name = property(get_full_name, set_full_name, del_full_name)
    #
    def get_short_name(self):
        # Getter for the short_name attribute.
        return self._short_name
    #
    def set_short_name(self, short_name):
        # Setter: short_name is validated as a string.
        self._short_name = common.check_str(short_name)
    #
    def del_short_name(self):
        # Deleter for the short_name attribute.
        del self._short_name
    #
    short_name = property(get_short_name, set_short_name, del_short_name)
    #
    def get_type(self):
        # Getter for the type attribute.
        return self._type
    #
    def set_type(self, type):
        # Setter: type is validated as a string. (The parameter intentionally
        # shadows the 'type' builtin, matching the API field name.)
        self._type = common.check_str(type)
    #
    def del_type(self):
        # Deleter for the type attribute.
        del self._type
    #
    type = property(get_type, set_type, del_type)
    #
    def get_identifier(self):
        # Getter for the identifier attribute.
        return self._identifier
    #
    def set_identifier(self, identifier):
        # Setter: identifier is validated as a string.
        self._identifier = common.check_str(identifier)
    #
    def del_identifier(self):
        # Deleter for the identifier attribute.
        del self._identifier
    #
    identifier = property(get_identifier, set_identifier, del_identifier)
    #
    def get_unit(self):
        # Getter for the unit attribute.
        return self._unit
    #
    def set_unit(self, unit):
        # Setter: unit is validated as a string.
        self._unit = common.check_str(unit)
    #
    def del_unit(self):
        # Deleter for the unit attribute.
        del self._unit
    #
    unit = property(get_unit, set_unit, del_unit)
    #
    def get_data_layer_dimension_response(self):
        # Getter for the data_layer_dimension_response attribute.
        return self._data_layer_dimension_response
    #
    def set_data_layer_dimension_response(self, data_layer_dimension_response):
        # Setter: the value must be a DataLayerDimensionReturn.
        self._data_layer_dimension_response = common.check_class(data_layer_dimension_response, DataLayerDimensionReturn)
    #
    def del_data_layer_dimension_response(self):
        # Deleter for the data_layer_dimension_response attribute.
        del self._data_layer_dimension_response
    #
    data_layer_dimension_response = property(get_data_layer_dimension_response, set_data_layer_dimension_response, del_data_layer_dimension_response)
    #
    def from_dict(data_layer_dimension_dict: Any):
        """
        Create a DataLayerDimension object from a dictionary.
        :param data_layer_dimension_dict: A dictionary that contains the keys of a DataLayerDimension.
        :type data_layer_dimension_dict: Any
        :rtype: ibmpairs.catalog.DataLayerDimension
        :raises Exception: if not a dictionary.
        """
        data_layer_id = None
        id = None
        order = None
        full_name = None
        short_name = None
        type = None
        identifier = None
        unit = None
        data_layer_dimension_response = None
        common.check_dict(data_layer_dimension_dict)
        if "data_layer_id" in data_layer_dimension_dict:
            if data_layer_dimension_dict.get("data_layer_id") is not None:
                data_layer_id = common.check_str(data_layer_dimension_dict.get("data_layer_id"))
        if "id" in data_layer_dimension_dict:
            if data_layer_dimension_dict.get("id") is not None:
                id = common.check_str(data_layer_dimension_dict.get("id"))
        if "order" in data_layer_dimension_dict:
            if data_layer_dimension_dict.get("order") is not None:
                order = common.check_int(data_layer_dimension_dict.get("order"))
        # For fullName/shortName both the camelCase (API) and snake_case
        # (to_dict round-trip) key variants are accepted.
        if "fullName" in data_layer_dimension_dict:
            if data_layer_dimension_dict.get("fullName") is not None:
                full_name = common.check_str(data_layer_dimension_dict.get("fullName"))
        elif "full_name" in data_layer_dimension_dict:
            if data_layer_dimension_dict.get("full_name") is not None:
                full_name = common.check_str(data_layer_dimension_dict.get("full_name"))
        if "shortName" in data_layer_dimension_dict:
            if data_layer_dimension_dict.get("shortName") is not None:
                short_name = common.check_str(data_layer_dimension_dict.get("shortName"))
        elif "short_name" in data_layer_dimension_dict:
            if data_layer_dimension_dict.get("short_name") is not None:
                short_name = common.check_str(data_layer_dimension_dict.get("short_name"))
        if "type" in data_layer_dimension_dict:
            if data_layer_dimension_dict.get("type") is not None:
                type = common.check_str(data_layer_dimension_dict.get("type"))
        if "identifier" in data_layer_dimension_dict:
            if data_layer_dimension_dict.get("identifier") is not None:
                identifier = common.check_str(data_layer_dimension_dict.get("identifier"))
        if "unit" in data_layer_dimension_dict:
            if data_layer_dimension_dict.get("unit") is not None:
                unit = common.check_str(data_layer_dimension_dict.get("unit"))
        if "data_layer_dimension_response" in data_layer_dimension_dict:
            if data_layer_dimension_dict.get("data_layer_dimension_response") is not None:
                data_layer_dimension_response = DataLayerDimensionReturn.from_dict(data_layer_dimension_dict.get("data_layer_dimension_response"))
        return DataLayerDimension(data_layer_id = data_layer_id,
                                  id = id,
                                  order = order,
                                  full_name = full_name,
                                  short_name = short_name,
                                  type = type,
                                  identifier = identifier,
                                  unit = unit,
                                  data_layer_dimension_response = data_layer_dimension_response
                                 )
    #
    def to_dict(self):
        """
        Create a dictionary from the objects structure.
        :rtype: dict
        """
        # Only populated attributes are emitted, always under snake_case keys.
        data_layer_dimension_dict: dict = {}
        if self._data_layer_id is not None:
            data_layer_dimension_dict["data_layer_id"] = self._data_layer_id
        if self._id is not None:
            data_layer_dimension_dict["id"] = self._id
        if self._order is not None:
            data_layer_dimension_dict["order"] = self._order
        if self._full_name is not None:
            data_layer_dimension_dict["full_name"] = self._full_name
        if self._short_name is not None:
            data_layer_dimension_dict["short_name"] = self._short_name
        if self._type is not None:
            data_layer_dimension_dict["type"] = self._type
        if self._identifier is not None:
            data_layer_dimension_dict["identifier"] = self._identifier
        if self._unit is not None:
            data_layer_dimension_dict["unit"] = self._unit
        if self._data_layer_dimension_response is not None:
            data_layer_dimension_dict["data_layer_dimension_response"] = common.class_to_dict(self._data_layer_dimension_response, DataLayerDimensionReturn)
        return data_layer_dimension_dict
    #
    def to_dict_data_layer_dimension_post(self):
        """
        Create a dictionary from the objects structure ready for a POST operation.
        :rtype: dict
        """
        # Only the fields the POST endpoint accepts are emitted, and they use
        # the API's camelCase key names.
        data_layer_dimension_dict: dict = {}
        if self._full_name is not None:
            data_layer_dimension_dict["fullName"] = self._full_name
        if self._short_name is not None:
            data_layer_dimension_dict["shortName"] = self._short_name
        if self._type is not None:
            data_layer_dimension_dict["type"] = self._type
        if self._unit is not None:
            data_layer_dimension_dict["unit"] = self._unit
        return data_layer_dimension_dict
    #
    def from_json(data_layer_dimension_json: Any):
        """
        Create a DataLayerDimension object from json (dictonary or str).
        :param data_layer_dimension_dict: A json dictionary that contains the keys of a DataLayerDimension or a string representation of a json dictionary.
        :type data_layer_dimension_dict: Any
        :rtype: ibmpairs.catalog.DataLayerDimension
        :raises Exception: if not a dictionary or a string.
        """
        if isinstance(data_layer_dimension_json, dict):
            data_layer_dimension = DataLayerDimension.from_dict(data_layer_dimension_json)
        elif isinstance(data_layer_dimension_json, str):
            data_layer_dimension_dict = json.loads(data_layer_dimension_json)
            data_layer_dimension = DataLayerDimension.from_dict(data_layer_dimension_dict)
        else:
            msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(data_layer_dimension_json), "data_layer_dimension_json")
            logger.error(msg)
            raise common.PAWException(msg)
        return data_layer_dimension
    #
    def to_json(self):
        """
        Create a string representation of a json dictionary from the objects structure.
        :rtype: string
        """
        return json.dumps(self.to_dict())
    #
    def to_json_data_layer_dimension_post(self):
        """
        Create a string representation of a json dictionary from the objects structure ready for a POST operation.
        :rtype: string
        """
        return json.dumps(self.to_dict_data_layer_dimension_post())
    #
    def display(self,
                columns: List[str] = ['id', 'short_name', 'identifier', 'order', 'full_name', 'type', 'unit']
               ):
        """
        A method to return a pandas.DataFrame object of a get result.
        :param columns: The columns to be returned in the pandas.DataFrame object, defaults to ['id', 'short_name', 'identifier', 'order', 'full_name', 'type', 'unit']
        :type columns: List[str]
        :returns: A pandas.DataFrame of attributes from the object.
        :rtype: pandas.DataFrame
        """
        # NOTE(review): 'columns' is a mutable default argument; it is not
        # mutated within this method, so the shared default is harmless here.
        display_dict = self.to_dict()
        display_df = pd.DataFrame([display_dict], columns=columns)
        return display_df
    #
    def get(self,
            id = None,
            client: cl.Client = None,
            verify: bool = constants.GLOBAL_SSL_VERIFY
           ):
        """
        A method to get a Data Layer Dimension.
        :param id: The Data Layer Dimension ID of the Data Layer Dimension to be gathered.
        :type id: str
        :param client: An IBM PAIRS Client.
        :type client: ibmpairs.client.Client
        :param verify: SSL verification
        :type verify: bool
        :returns: A populated Data Layer Dimension object.
        :rtype: ibmpairs.catalog.DataLayerDimension
        :raises Exception: A ibmpairs.client.Client is not found,
                           an ID is not provided or already held in the object,
                           a server error occurred,
                           the status of the request is not 200.
        """
        # An explicitly supplied id overrides the one held by the object.
        if id is not None:
            self._id = common.check_str(id)
        if self._id is None:
            msg = messages.ERROR_CATALOG_DATA_LAYER_DIMENSION_ID
            logger.error(msg)
            raise common.PAWException(msg)
        # Resolve the client in precedence order: argument, global, self.
        cli = common.set_client(input_client = client,
                                global_client = cl.GLOBAL_PAIRS_CLIENT,
                                self_client = self._client)
        try:
            response = cli.get(url = cli.get_host() +
                                     constants.CATALOG_DATA_LAYER_DIMENSIONS_API +
                                     common.check_str(self._id),
                               verify = verify
                              )
        except Exception as e:
            msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_LAYER_DIMENSIONS_API + common.check_str(self._id), e)
            logger.error(msg)
            raise common.PAWException(msg)
        # Only HTTP 200 is treated as success.
        if response.status_code != 200:
            # NOTE(review): unlike create(), the response body is not parsed
            # for a server-provided message; 'failed' is reported verbatim.
            error_message = 'failed'
            msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_LAYER_DIMENSIONS_API + common.check_str(self._id), response.status_code, error_message)
            logger.error(msg)
            raise common.PAWException(msg)
        else:
            # A new object is returned; self is not updated in place.
            data_layer_dimension_get = DataLayerDimension.from_dict(response.json())
            return data_layer_dimension_get
    #
    def create(self,
               data_layer_id = None,
               client: cl.Client = None,
               verify: bool = constants.GLOBAL_SSL_VERIFY
              ):
        """
        A method to create a Data Layer Dimension.
        :param data_layer_id: The ID of the Data Layer the Data Layer Dimension should be created for.
        :type data_layer_id: str
        :param client: An IBM PAIRS Client.
        :type client: ibmpairs.client.Client
        :param verify: SSL verification
        :type verify: bool
        :raises Exception: A ibmpairs.client.Client is not found,
                           a Data Layer ID is not provided or already held in the object,
                           a server error occurred,
                           the status of the request is not 200.
        """
        # An explicitly supplied data_layer_id overrides the held value.
        if data_layer_id is not None:
            self._data_layer_id = common.check_str(data_layer_id)
        if self._data_layer_id is None:
            msg = messages.ERROR_CATALOG_DATA_LAYER_DIMENSION_DATA_LAYER_ID
            logger.error(msg)
            raise common.PAWException(msg)
        cli = common.set_client(input_client = client,
                                global_client = cl.GLOBAL_PAIRS_CLIENT,
                                self_client = self._client)
        # The POST body contains only the camelCase fields the API accepts.
        data_layer_dimension = self.to_json_data_layer_dimension_post()
        try:
            response = cli.post(url = cli.get_host() +
                                      constants.CATALOG_DATA_LAYERS_API +
                                      self._data_layer_id +
                                      constants.CATALOG_DATA_LAYERS_API_DIMENSIONS,
                                headers = constants.CLIENT_PUT_AND_POST_HEADER,
                                body = data_layer_dimension,
                                verify = verify
                               )
        except Exception as e:
            msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('POST', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(self._data_layer_id) + constants.CATALOG_DATA_LAYERS_API_DIMENSIONS, e)
            logger.error(msg)
            raise common.PAWException(msg)
        if response.status_code != 200:
            # Best effort: try to surface the server's error message;
            # fall back to 'failed' if the body cannot be parsed.
            error_message = 'failed'
            if response.json() is not None:
                try:
                    # data_layer_dimension_return_from_dict is a module-level
                    # helper defined elsewhere in this file.
                    data_layer_dimension_return = data_layer_dimension_return_from_dict(response.json())
                    error_message = data_layer_dimension_return.message
                except:
                    msg = messages.INFO_CATALOG_RESPOSE_NOT_SUCCESSFUL_NO_ERROR_MESSAGE
                    logger.info(msg)
            msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('POST', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(self._data_layer_id) + constants.CATALOG_DATA_LAYERS_API_DIMENSIONS, response.status_code, error_message)
            logger.error(msg)
            raise common.PAWException(msg)
        else:
            # On success, store the parsed response and adopt the new
            # dimension's ID on this object.
            self._data_layer_dimension_response = data_layer_dimension_return_from_dict(response.json())
            self._id = common.check_str(self._data_layer_dimension_response._data_layer_dimension_id)
            msg = messages.INFO_CATALOG_DATA_LAYER_DIMENSIONS_CREATE_SUCCESS.format(str(self._data_layer_dimension_response._data_layer_dimension_id))
            logger.info(msg)
#
class DataLayerDimensions:
#
#_client: cl.Client
# Common
#_data_layer_dimensions: List[DataLayerDimension]
#_data_layer_id: str
"""
An object to represent a list of IBM PAIRS Data Layer Dimensions.
:param client: An IBM PAIRS Client.
:type client: ibmpairs.client.Client
:param data_layer_dimensions: An list of Data Layer Dimensions.
:type data_layer_dimensions: List[ibmpairs.catalog.DataLayerDimension]
:param data_layer_id: The Data Layer ID of the Data Layer Dimensions.
:type data_layer_id: str
:raises Exception: An ibmpairs.client.Client is not found.
"""
#
def __str__(self):
"""
The method creates a string representation of the internal class structure.
:returns: A string representation of the internal class structure.
:rtype: str
"""
return json.dumps(self.to_dict(),
indent = constants.GLOBAL_JSON_REPR_INDENT,
sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
#
def __repr__(self):
"""
The method creates a dict representation of the internal class structure.
:returns: A dict representation of the internal class structure.
:rtype: dict
"""
return json.dumps(self.to_dict(),
indent = constants.GLOBAL_JSON_REPR_INDENT,
sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
#
def __getitem__(self, data_layer_dimension_full_name):
"""
A method to overload the default behaviour of the slice on this object to be an
element from the data_layer_dimensions attribute.
:param data_layer_dimension_full_name: The name of a Data Layer Dimension to search for, if this is
numeric, the method simply returns the default (list order).
:type data_layer_dimension_full_name: str
:raises Exception: If less than one value is found,
if more than one value is found.
"""
if isinstance(data_layer_dimension_full_name, int):
return self._data_layer_dimensions[data_layer_dimension_full_name]
elif isinstance(data_layer_dimension_full_name, str):
index_list = []
index = 0
foundCount = 0
for data_layer_dimension in self._data_layer_dimensions:
if (data_layer_dimension.full_name == data_layer_dimension_full_name):
if (data_layer_dimension.full_name == data_layer_dimension_full_name):
foundCount = foundCount + 1
index_list.append(index)
else:
msg = messages.WARN_CATALOG_DATA_LAYER_DIMENSIONS_OBJECT_NO_NAME.format(data_layer_dimension_full_name)
logger.warning(msg)
index = index + 1
if foundCount == 0:
msg = messages.ERROR_CATALOG_DATA_LAYER_DIMENSIONS_NO_DATA_SET.format(data_layer_dimension_full_name)
logger.error(msg)
raise common.PAWException(msg)
elif foundCount == 1:
return self._data_layer_dimensions[index_list[0]]
else:
msg = messages.ERROR_CATALOG_DATA_LAYER_DIMENSIONS_MULTIPLE_IDENTICAL_NAMES.format(data_layer_dimension_full_name)
logger.error(msg)
raise common.PAWException(msg)
else:
msg = messages.ERROR_CATALOG_DATA_LAYER_DIMENSIONS_TYPE_UNKNOWN.format(type(data_layer_dimension_full_name))
logger.error(msg)
raise common.PAWException(msg)
#
def __init__(self,
client: cl.Client = None,
data_layer_dimensions: List[DataLayerDimension] = None,
data_layer_id: str = None
):
self._client = common.set_client(input_client = client,
global_client = cl.GLOBAL_PAIRS_CLIENT)
self._data_layer_dimensions = data_layer_dimensions
self._data_layer_id = data_layer_id
#
def get_client(self):
return self._client
#
def set_client(self, c):
self._client = common.check_class(c, cl.Client)
#
def del_client(self):
del self._client
#
client = property(get_client, set_client, del_client)
#
def get_data_layer_dimensions(self):
return self._data_layer_dimensions
#
def set_data_layer_dimensions(self, data_layer_dimensions):
self._data_layer_dimensions = common.check_class(data_layer_dimensions, List[DataLayerDimension])
#
def del_data_layer_dimensions(self):
del self._data_layer_dimensions
#
data_layer_dimensions = property(get_data_layer_dimensions, set_data_layer_dimensions, del_data_layer_dimensions)
#
def get_data_layer_id(self):
return self._data_layer_id
#
def set_data_layer_id(self, data_layer_id):
self._data_layer_id = common.check_str(data_layer_id)
#
def del_data_layer_id(self):
del self._data_layer_id
#
data_layer_id = property(get_data_layer_id, set_data_layer_id, del_data_layer_id)
#
def from_dict(data_layer_dimensions_input: Any):
    """
    Create a DataLayerDimensions object from a dictionary (or a list of
    DataLayerDimension dictionaries).

    :param data_layer_dimensions_input: A dictionary that contains the keys of a DataLayerDimensions, or a list of DataLayerDimension dictionaries.
    :type data_layer_dimensions_input: Any
    :rtype: ibmpairs.catalog.DataLayerDimensions
    :raises Exception: If not a dictionary or a list.
    """
    data_layer_dimensions = None
    # Initialize so the value is defined even when the key is absent.
    data_layer_id = None
    if isinstance(data_layer_dimensions_input, dict):
        common.check_dict(data_layer_dimensions_input)
        if "data_layer_dimensions" in data_layer_dimensions_input:
            if data_layer_dimensions_input.get("data_layer_dimensions") is not None:
                data_layer_dimensions = common.from_list(data_layer_dimensions_input.get("data_layer_dimensions"), DataLayerDimension.from_dict)
        if "data_layer_id" in data_layer_dimensions_input:
            if data_layer_dimensions_input.get("data_layer_id") is not None:
                data_layer_id = common.check_str(data_layer_dimensions_input.get("data_layer_id"))
    elif isinstance(data_layer_dimensions_input, list):
        data_layer_dimensions = common.from_list(data_layer_dimensions_input, DataLayerDimension.from_dict)
    else:
        msg = messages.ERROR_CATALOG_DATA_LAYER_DIMENSIONS_UNKNOWN.format(type(data_layer_dimensions_input))
        logger.error(msg)
        raise common.PAWException(msg)
    # BUG FIX: the parsed data_layer_id was previously discarded; pass it to
    # the constructor so round-tripping a dict preserves it.
    return DataLayerDimensions(data_layer_dimensions = data_layer_dimensions,
                               data_layer_id = data_layer_id)
#
def to_dict(self):
    """
    Create a dictionary from the object's structure.

    :rtype: dict
    """
    result: dict = {}
    # Only keys whose backing attribute is populated are emitted.
    if self._data_layer_dimensions is not None:
        result["data_layer_dimensions"] = common.from_list(
            self._data_layer_dimensions,
            lambda dimension: common.class_to_dict(dimension, DataLayerDimension))
    if self._data_layer_id is not None:
        result["data_layer_id"] = self._data_layer_id
    return result
#
def from_json(data_layer_dimensions_json: Any):
    """
    Create a DataLayerDimensions object from json (dictionary or str).

    :param data_layer_dimensions_json: A json dictionary that contains the keys of a DataLayerDimensions or a string representation of a json dictionary.
    :type data_layer_dimensions_json: Any
    :rtype: ibmpairs.catalog.DataLayerDimensions
    :raises Exception: If not a dictionary or a string.
    """
    # Strings are parsed first; dictionaries are passed straight through.
    if isinstance(data_layer_dimensions_json, str):
        parsed = json.loads(data_layer_dimensions_json)
        return DataLayerDimensions.from_dict(parsed)
    if isinstance(data_layer_dimensions_json, dict):
        return DataLayerDimensions.from_dict(data_layer_dimensions_json)
    msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(data_layer_dimensions_json), "data_layer_dimensions_json")
    logger.error(msg)
    raise common.PAWException(msg)
#
def to_json(self):
    """
    Create a string representation of a json dictionary from the object's structure.

    :rtype: string
    """
    as_dict = self.to_dict()
    return json.dumps(as_dict)
#
def display(self,
            columns: List[str] = ['id', 'short_name', 'identifier', 'order', 'full_name', 'type', 'unit'],
            sort_by: str = 'id'
           ):
    """
    A method to return a pandas.DataFrame object of get results.

    :param columns: The columns to be returned in the pandas.DataFrame object, defaults to ['id', 'short_name', 'identifier', 'order', 'full_name', 'type', 'unit']
    :type columns: List[str]
    :param sort_by: The column to sort the result by, defaults to 'id'.
    :type sort_by: str
    :returns: A pandas.DataFrame of attributes from the data_layer_dimensions attribute.
    :rtype: pandas.DataFrame
    """
    frames = None
    # Accumulate one single-row frame per dimension.
    for dimension in self._data_layer_dimensions:
        chunk = dimension.display(columns)
        frames = chunk if frames is None else pd.concat([frames, chunk])
    frames.reset_index(inplace=True, drop=True)
    return frames.sort_values(by=[sort_by])
#
def get(self,
        data_layer_id = None,
        client: cl.Client = None,
        verify: bool = constants.GLOBAL_SSL_VERIFY
       ):
    """
    A method to get a list of Data Layer Dimensions by Data Layer ID.

    :param data_layer_id: The Data Layer ID of the Data Layer Dimensions to be gathered.
    :type data_layer_id: str
    :param client: An IBM PAIRS Client.
    :type client: ibmpairs.client.Client
    :param verify: SSL verification
    :type verify: bool
    :returns: A populated Data Layer Dimensions object.
    :rtype: ibmpairs.catalog.DataLayerDimensions
    :raises Exception: A ibmpairs.client.Client is not found,
        a Data Layer ID is not provided or already held in the object,
        a server error occurred,
        the status of the request is not 200.
    """
    # An explicit data_layer_id argument overrides any value already on the object.
    if data_layer_id is not None:
        self._data_layer_id = common.check_str(data_layer_id)
    if self._data_layer_id is None:
        msg = messages.ERROR_CATALOG_DATA_LAYER_DIMENSIONS_DATA_LAYER_ID
        logger.error(msg)
        raise common.PAWException(msg)
    # Client resolution delegated to common.set_client; presumably the explicit
    # argument takes precedence over the global and self clients — confirm there.
    cli = common.set_client(input_client = client,
                            global_client = cl.GLOBAL_PAIRS_CLIENT,
                            self_client = self._client)
    try:
        # GET {host}{CATALOG_DATA_LAYERS_API}{data_layer_id}{CATALOG_DATA_LAYERS_API_DIMENSIONS}
        response = cli.get(url = cli.get_host() +
                           constants.CATALOG_DATA_LAYERS_API +
                           common.check_str(self._data_layer_id) +
                           constants.CATALOG_DATA_LAYERS_API_DIMENSIONS,
                           verify = verify
                          )
    except Exception as e:
        # Transport-level failure: wrapped in a PAWException with request context.
        msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(self._data_layer_id) + constants.CATALOG_DATA_LAYERS_API_DIMENSIONS, e)
        logger.error(msg)
        raise common.PAWException(msg)
    if response.status_code != 200:
        error_message = 'failed'
        msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(self._data_layer_id) + constants.CATALOG_DATA_LAYERS_API_DIMENSIONS, response.status_code, error_message)
        logger.error(msg)
        raise common.PAWException(msg)
    else:
        # Parse the response, cache the dimension list on this object, and
        # return the freshly built DataLayerDimensions.
        data_layer_dimensions_get = DataLayerDimensions.from_dict(response.json())
        self._data_layer_dimensions = data_layer_dimensions_get.data_layer_dimensions
        return data_layer_dimensions_get
#
class DataLayerPropertyReturn:
    """
    An object to represent the response from a DataLayerProperty object call.

    :param data_layer_property_id: A Data Layer Property ID.
    :type data_layer_property_id: str
    :param status: A status code.
    :type status: int
    :param message: A status message from the call.
    :type message: str
    """

    def __init__(self,
                 data_layer_property_id: str = None,
                 status: int = None,
                 message: str = None
                ):
        self._data_layer_property_id = data_layer_property_id
        self._status = status
        self._message = message

    def __str__(self):
        """
        Return a pretty-printed JSON string of the internal class structure.

        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)

    def __repr__(self):
        """
        Return a pretty-printed JSON string of the internal class structure.

        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)

    # --- data_layer_property_id accessors -------------------------------
    def get_data_layer_property_id(self):
        return self._data_layer_property_id

    def set_data_layer_property_id(self, data_layer_property_id):
        self._data_layer_property_id = common.check_str(data_layer_property_id)

    def del_data_layer_property_id(self):
        del self._data_layer_property_id

    data_layer_property_id = property(get_data_layer_property_id, set_data_layer_property_id, del_data_layer_property_id)

    # --- status accessors ------------------------------------------------
    def get_status(self):
        return self._status

    def set_status(self, status):
        self._status = common.check_int(status)

    def del_status(self):
        del self._status

    status = property(get_status, set_status, del_status)

    # --- message accessors -----------------------------------------------
    def get_message(self):
        return self._message

    def set_message(self, message):
        self._message = common.check_str(message)

    def del_message(self):
        del self._message

    message = property(get_message, set_message, del_message)

    def from_dict(data_layer_property_return_dict: Any):
        """
        Create a DataLayerPropertyReturn object from a dictionary.

        :param data_layer_property_return_dict: A dictionary that contains the keys of a DataLayerPropertyReturn.
        :type data_layer_property_return_dict: Any
        :rtype: ibmpairs.catalog.DataLayerPropertyReturn
        :raises Exception: if not a dictionary.
        """
        data_layer_property_id = None
        status = None
        message = None
        common.check_dict(data_layer_property_return_dict)
        # The server may use camelCase; the camelCase key, when present,
        # takes precedence over the snake_case one.
        if "datalayerPropertyId" in data_layer_property_return_dict:
            raw_id = data_layer_property_return_dict.get("datalayerPropertyId")
        elif "data_layer_property_id" in data_layer_property_return_dict:
            raw_id = data_layer_property_return_dict.get("data_layer_property_id")
        else:
            raw_id = None
        if raw_id is not None:
            data_layer_property_id = common.check_str(raw_id)
        raw_status = data_layer_property_return_dict.get("status")
        if raw_status is not None:
            status = common.check_int(raw_status)
        raw_message = data_layer_property_return_dict.get("message")
        if raw_message is not None:
            message = common.check_str(raw_message)
        return DataLayerPropertyReturn(data_layer_property_id = data_layer_property_id,
                                       status = status,
                                       message = message
                                      )

    def to_dict(self):
        """
        Create a dictionary from the object's structure.

        :rtype: dict
        """
        out: dict = {}
        if self._data_layer_property_id is not None:
            out["data_layer_property_id"] = self._data_layer_property_id
        if self._status is not None:
            out["status"] = self._status
        if self._message is not None:
            out["message"] = self._message
        return out

    def from_json(data_layer_property_return_json: Any):
        """
        Create a DataLayerPropertyReturn object from json (dictionary or str).

        :param data_layer_property_return_json: A json dictionary that contains the keys of a DataLayerPropertyReturn or a string representation of a json dictionary.
        :type data_layer_property_return_json: Any
        :rtype: ibmpairs.catalog.DataLayerPropertyReturn
        :raises Exception: If not a dictionary or a string.
        """
        if isinstance(data_layer_property_return_json, str):
            parsed = json.loads(data_layer_property_return_json)
            return DataLayerPropertyReturn.from_dict(parsed)
        if isinstance(data_layer_property_return_json, dict):
            return DataLayerPropertyReturn.from_dict(data_layer_property_return_json)
        msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(data_layer_property_return_json), "data_layer_property_return_json")
        logger.error(msg)
        raise common.PAWException(msg)

    def to_json(self):
        """
        Create a string representation of a json dictionary from the object's structure.

        :rtype: string
        """
        return json.dumps(self.to_dict())
#
class DataLayerProperty:
    #
    #_client: cl.Client
    # Common
    #_full_name: str
    #_short_name: str
    #_type: str
    #_unit: str
    # GET Exclusive
    # (GET on the data layer properties endpoint — see constants.CATALOG_DATA_LAYERS_API_PROPERTIES)
    #_id: int
    #_order: int
    #_identifier: str
    #_data_layer_id: str
    # Internal
    #_data_layer_property_response
    """
    An object to represent an IBM PAIRS Data Layer Property.

    :param client: An IBM PAIRS Client.
    :type client: ibmpairs.client.Client
    :param data_layer_id: A Data Layer ID.
    :type data_layer_id: str
    :param id: The ID number of the Data Layer Property.
    :type id: str
    :param order: The order number.
    :type order: int
    :param full_name: Full name of the Data Layer Property.
    :type full_name: str
    :param short_name: Short name of the Data Layer Property.
    :type short_name: str
    :param type: Type of the Data Layer Property.
    :type type: str
    :param identifier: The identifier.
    :type identifier: str
    :param unit: Unit of the Data Layer Property.
    :type unit: str
    :param data_layer_property_response: A response object from a DataLayerProperty method call.
    :type data_layer_property_response: ibmpairs.catalog.DataLayerPropertyReturn
    :raises Exception: An ibmpairs.client.Client is not found.
    """
    #
    def __str__(self):
        """
        The method creates a string representation of the internal class structure.

        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __repr__(self):
        """
        The method creates a JSON string representation of the internal class structure.

        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
    #
    def __init__(self,
                 client: cl.Client = None,
                 data_layer_id: str = None,
                 id: str = None,
                 order: int = None,
                 full_name: str = None,
                 short_name: str = None,
                 type: str = None,
                 identifier: str = None,
                 unit: str = None,
                 data_layer_property_response: DataLayerPropertyReturn = None
                ):
        # Resolve the client via common.set_client (explicit argument or the
        # global ibmpairs client).
        self._client = common.set_client(input_client = client,
                                         global_client = cl.GLOBAL_PAIRS_CLIENT)
        self._data_layer_id = data_layer_id
        self._id = id
        self._order = order
        self._full_name = full_name
        self._short_name = short_name
        self._type = type
        self._identifier = identifier
        self._unit = unit
        # Always hold a response object so create() can populate it.
        if data_layer_property_response is None:
            self._data_layer_property_response = DataLayerPropertyReturn()
        else:
            self._data_layer_property_response = data_layer_property_response
    #
    # Accessors: each attribute exposes get_/set_/del_ plus a property.
    def get_client(self):
        return self._client
    #
    def set_client(self, c):
        self._client = common.check_class(c, cl.Client)
    #
    def del_client(self):
        del self._client
    #
    client = property(get_client, set_client, del_client)
    #
    def get_data_layer_id(self):
        return self._data_layer_id
    #
    def set_data_layer_id(self, data_layer_id):
        self._data_layer_id = common.check_str(data_layer_id)
    #
    def del_data_layer_id(self):
        del self._data_layer_id
    #
    data_layer_id = property(get_data_layer_id, set_data_layer_id, del_data_layer_id)
    #
    def get_id(self):
        return self._id
    #
    def set_id(self, id):
        self._id = common.check_str(id)
    #
    def del_id(self):
        del self._id
    #
    id = property(get_id, set_id, del_id)
    #
    def get_order(self):
        return self._order
    #
    def set_order(self, order):
        self._order = common.check_int(order)
    #
    def del_order(self):
        del self._order
    #
    order = property(get_order, set_order, del_order)
    #
    def get_full_name(self):
        return self._full_name
    #
    def set_full_name(self, full_name):
        self._full_name = common.check_str(full_name)
    #
    def del_full_name(self):
        del self._full_name
    #
    full_name = property(get_full_name, set_full_name, del_full_name)
    #
    def get_short_name(self):
        return self._short_name
    #
    def set_short_name(self, short_name):
        self._short_name = common.check_str(short_name)
    #
    def del_short_name(self):
        del self._short_name
    #
    short_name = property(get_short_name, set_short_name, del_short_name)
    #
    def get_type(self):
        return self._type
    #
    def set_type(self, type):
        self._type = common.check_str(type)
    #
    def del_type(self):
        del self._type
    #
    type = property(get_type, set_type, del_type)
    #
    def get_unit(self):
        return self._unit
    #
    def set_unit(self, unit):
        self._unit = common.check_str(unit)
    #
    def del_unit(self):
        del self._unit
    #
    unit = property(get_unit, set_unit, del_unit)
    #
    def get_identifier(self):
        return self._identifier
    #
    def set_identifier(self, identifier):
        self._identifier = common.check_str(identifier)
    #
    def del_identifier(self):
        del self._identifier
    #
    identifier = property(get_identifier, set_identifier, del_identifier)
    #
    def get_data_layer_property_response(self):
        return self._data_layer_property_response
    #
    def set_data_layer_property_response(self, data_layer_property_response):
        self._data_layer_property_response = common.check_class(data_layer_property_response, DataLayerPropertyReturn)
    #
    def del_data_layer_property_response(self):
        del self._data_layer_property_response
    #
    data_layer_property_response = property(get_data_layer_property_response, set_data_layer_property_response, del_data_layer_property_response)
    #
    def from_dict(data_layer_property_dict: Any):
        """
        Create a DataLayerProperty object from a dictionary.

        :param data_layer_property_dict: A dictionary that contains the keys of a DataLayerProperty.
        :type data_layer_property_dict: Any
        :rtype: ibmpairs.catalog.DataLayerProperty
        :raises Exception: if not a dictionary.
        """
        data_layer_id = None
        id = None
        order = None
        full_name = None
        short_name = None
        type = None
        identifier = None
        unit = None
        data_layer_property_response = None
        common.check_dict(data_layer_property_dict)
        if "data_layer_id" in data_layer_property_dict:
            if data_layer_property_dict.get("data_layer_id") is not None:
                # BUG FIX: was common.check_int; data_layer_id is a string
                # everywhere else in this class (__init__ annotation,
                # set_data_layer_id, create()).
                data_layer_id = common.check_str(data_layer_property_dict.get("data_layer_id"))
        if "id" in data_layer_property_dict:
            if data_layer_property_dict.get("id") is not None:
                id = common.check_str(data_layer_property_dict.get("id"))
        if "order" in data_layer_property_dict:
            if data_layer_property_dict.get("order") is not None:
                order = common.check_int(data_layer_property_dict.get("order"))
        # camelCase keys (server form) take precedence over snake_case.
        if "fullName" in data_layer_property_dict:
            if data_layer_property_dict.get("fullName") is not None:
                full_name = common.check_str(data_layer_property_dict.get("fullName"))
        elif "full_name" in data_layer_property_dict:
            if data_layer_property_dict.get("full_name") is not None:
                full_name = common.check_str(data_layer_property_dict.get("full_name"))
        if "shortName" in data_layer_property_dict:
            if data_layer_property_dict.get("shortName") is not None:
                short_name = common.check_str(data_layer_property_dict.get("shortName"))
        elif "short_name" in data_layer_property_dict:
            if data_layer_property_dict.get("short_name") is not None:
                short_name = common.check_str(data_layer_property_dict.get("short_name"))
        if "type" in data_layer_property_dict:
            if data_layer_property_dict.get("type") is not None:
                type = common.check_str(data_layer_property_dict.get("type"))
        if "identifier" in data_layer_property_dict:
            if data_layer_property_dict.get("identifier") is not None:
                identifier = common.check_str(data_layer_property_dict.get("identifier"))
        if "unit" in data_layer_property_dict:
            if data_layer_property_dict.get("unit") is not None:
                unit = common.check_str(data_layer_property_dict.get("unit"))
        if "data_layer_property_response" in data_layer_property_dict:
            if data_layer_property_dict.get("data_layer_property_response") is not None:
                data_layer_property_response = DataLayerPropertyReturn.from_dict(data_layer_property_dict.get("data_layer_property_response"))
        return DataLayerProperty(data_layer_id = data_layer_id,
                                 id = id,
                                 order = order,
                                 full_name = full_name,
                                 short_name = short_name,
                                 type = type,
                                 identifier = identifier,
                                 unit = unit,
                                 data_layer_property_response = data_layer_property_response
                                )
    #
    def to_dict(self):
        """
        Create a dictionary from the objects structure.

        :rtype: dict
        """
        data_layer_property_dict: dict = {}
        if self._data_layer_id is not None:
            data_layer_property_dict["data_layer_id"] = self._data_layer_id
        if self._id is not None:
            data_layer_property_dict["id"] = self._id
        if self._order is not None:
            data_layer_property_dict["order"] = self._order
        if self._full_name is not None:
            data_layer_property_dict["full_name"] = self._full_name
        if self._short_name is not None:
            data_layer_property_dict["short_name"] = self._short_name
        if self._type is not None:
            data_layer_property_dict["type"] = self._type
        if self._identifier is not None:
            data_layer_property_dict["identifier"] = self._identifier
        if self._unit is not None:
            data_layer_property_dict["unit"] = self._unit
        if self._data_layer_property_response is not None:
            data_layer_property_dict["data_layer_property_response"] = common.class_to_dict(self._data_layer_property_response, DataLayerPropertyReturn)
        return data_layer_property_dict
    #
    def to_dict_data_layer_property_post(self):
        """
        Create a dictionary from the objects structure ready for a POST operation.
        Only the server-accepted camelCase keys are emitted.

        :rtype: dict
        """
        data_layer_property_dict: dict = {}
        if self._full_name is not None:
            data_layer_property_dict["fullName"] = self._full_name
        if self._short_name is not None:
            data_layer_property_dict["shortName"] = self._short_name
        if self._type is not None:
            data_layer_property_dict["type"] = self._type
        if self._unit is not None:
            data_layer_property_dict["unit"] = self._unit
        return data_layer_property_dict
    #
    def from_json(data_layer_property_json: Any):
        """
        Create a DataLayerProperty object from json (dictonary or str).

        :param data_layer_property_json: A json dictionary that contains the keys of a DataLayerProperty or a string representation of a json dictionary.
        :type data_layer_property_json: Any
        :rtype: ibmpairs.catalog.DataLayerProperty
        :raises Exception: If not a dictionary or a string.
        """
        if isinstance(data_layer_property_json, dict):
            data_layer_property = DataLayerProperty.from_dict(data_layer_property_json)
        elif isinstance(data_layer_property_json, str):
            data_layer_property_dict = json.loads(data_layer_property_json)
            data_layer_property = DataLayerProperty.from_dict(data_layer_property_dict)
        else:
            msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(data_layer_property_json), "data_layer_property_json")
            logger.error(msg)
            raise common.PAWException(msg)
        return data_layer_property
    #
    def to_json(self):
        """
        Create a string representation of a json dictionary from the objects structure.

        :rtype: string
        """
        return json.dumps(self.to_dict())
    #
    def to_json_data_layer_property_post(self):
        """
        Create a string representation of a json dictionary from the objects structure ready for a POST operation.

        :rtype: string
        """
        return json.dumps(self.to_dict_data_layer_property_post())
    #
    def display(self,
                columns: List[str] = ['id', 'short_name', 'identifier', 'order', 'full_name', 'type', 'unit']
               ):
        """
        A method to return a pandas.DataFrame object of a get result.

        :param columns: The columns to be returned in the pandas.DataFrame object, defaults to ['id', 'short_name', 'identifier', 'order', 'full_name', 'type', 'unit']
        :type columns: List[str]
        :returns: A pandas.DataFrame of attributes from the object.
        :rtype: pandas.DataFrame
        """
        display_dict = self.to_dict()
        # One row per object; missing attributes render as NaN.
        display_df = pd.DataFrame([display_dict], columns=columns)
        return display_df
    #
    def get(self,
            id = None,
            client: cl.Client = None,
            verify: bool = constants.GLOBAL_SSL_VERIFY
           ):
        """
        A method to get a Data Layer Property.

        :param id: The Data Layer Property ID of the Data Layer Property to be gathered.
        :type id: str
        :param client: An IBM PAIRS Client.
        :type client: ibmpairs.client.Client
        :param verify: SSL verification
        :type verify: bool
        :returns: A populated Data Layer Property object.
        :rtype: ibmpairs.catalog.DataLayerProperty
        :raises Exception: A ibmpairs.client.Client is not found,
            an ID is not provided or already held in the object,
            a server error occurred,
            the status of the request is not 200.
        """
        # An explicit id argument overrides any value already on the object.
        if id is not None:
            self._id = common.check_str(id)
        if self._id is None:
            msg = messages.ERROR_CATALOG_DATA_LAYER_PROPERTY_ID
            logger.error(msg)
            raise common.PAWException(msg)
        cli = common.set_client(input_client = client,
                                global_client = cl.GLOBAL_PAIRS_CLIENT,
                                self_client = self._client)
        try:
            response = cli.get(url = cli.get_host() +
                               constants.CATALOG_DATA_LAYER_PROPERTIES_API +
                               common.check_str(self._id),
                               verify = verify
                              )
        except Exception as e:
            msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_LAYER_PROPERTIES_API + common.check_str(self._id), e)
            logger.error(msg)
            raise common.PAWException(msg)
        if response.status_code != 200:
            error_message = 'failed'
            msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_LAYER_PROPERTIES_API + common.check_str(self._id), response.status_code, error_message)
            logger.error(msg)
            raise common.PAWException(msg)
        else:
            # NOTE: the result is returned, not merged into self.
            data_layer_property_get = DataLayerProperty.from_dict(response.json())
            return data_layer_property_get
    #
    def create(self,
               data_layer_id = None,
               client: cl.Client = None,
               verify: bool = constants.GLOBAL_SSL_VERIFY
              ):
        """
        A method to create a Data Layer Property.

        :param data_layer_id: The ID of the Data Layer the Data Layer Property should be created for.
        :type data_layer_id: str
        :param client: An IBM PAIRS Client.
        :type client: ibmpairs.client.Client
        :param verify: SSL verification
        :type verify: bool
        :raises Exception: A ibmpairs.client.Client is not found,
            a Data Layer ID is not provided or already held in the object,
            a server error occurred,
            the status of the request is not 200.
        """
        if data_layer_id is not None:
            self._data_layer_id = common.check_str(data_layer_id)
        if self._data_layer_id is None:
            msg = messages.ERROR_CATALOG_DATA_LAYER_PROPERTY_DATA_LAYER_ID
            logger.error(msg)
            raise common.PAWException(msg)
        cli = common.set_client(input_client = client,
                                global_client = cl.GLOBAL_PAIRS_CLIENT,
                                self_client = self._client)
        data_layer_property = self.to_json_data_layer_property_post()
        try:
            response = cli.post(url = cli.get_host() +
                                constants.CATALOG_DATA_LAYERS_API +
                                common.check_str(self._data_layer_id) +
                                constants.CATALOG_DATA_LAYERS_API_PROPERTIES,
                                headers = constants.CLIENT_PUT_AND_POST_HEADER,
                                body = data_layer_property,
                                verify = verify
                               )
        except Exception as e:
            msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('POST', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(self._data_layer_id) + constants.CATALOG_DATA_LAYERS_API_PROPERTIES, e)
            logger.error(msg)
            raise common.PAWException(msg)
        if response.status_code != 200:
            error_message = 'failed'
            if response.json() is not None:
                try:
                    # BUG FIX: was self._data_layer_property_return, an attribute
                    # no other method reads; the response object lives in
                    # self._data_layer_property_response everywhere else.
                    self._data_layer_property_response = data_layer_property_return_from_dict(response.json())
                    error_message = self._data_layer_property_response.message
                except Exception:
                    # Narrowed from a bare except: the body can't be parsed as a
                    # DataLayerPropertyReturn, so fall back to the generic message.
                    msg = messages.INFO_CATALOG_RESPOSE_NOT_SUCCESSFUL_NO_ERROR_MESSAGE
                    logger.info(msg)
            msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('POST', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(self._data_layer_id) + constants.CATALOG_DATA_LAYERS_API_PROPERTIES, response.status_code, error_message)
            logger.error(msg)
            raise common.PAWException(msg)
        else:
            # Record the returned ID of the newly created property on self.
            self._data_layer_property_response = data_layer_property_return_from_dict(response.json())
            self._id = common.check_str(self._data_layer_property_response._data_layer_property_id)
            msg = messages.INFO_CATALOG_DATA_LAYER_PROPERTY_CREATE_SUCCESS.format(common.check_str(self._data_layer_property_response._data_layer_property_id))
            logger.info(msg)
#
class DataLayerProperties:
#
#_client: cl.Client
# Common
#_data_layer_properties: List[DataLayerProperty]
#_data_layer_id: str
"""
An object to represent a list of IBM PAIRS Data Layer Properties.
:param client: An IBM PAIRS Client.
:type client: ibmpairs.client.Client
:param data_layer_properties: An list of Data Layer Properties.
:type data_layer_properties: List[ibmpairs.catalog.DataLayerProperty]
:param data_layer_id: The Data Layer ID of the Data Layer Properties.
:type data_layer_id: str
:raises Exception: An ibmpairs.client.Client is not found.
"""
#
def __str__(self):
    """
    The method creates a string representation of the internal class structure.

    :returns: A pretty-printed JSON string representation of the internal class structure.
    :rtype: str
    """
    return json.dumps(self.to_dict(),
                      indent = constants.GLOBAL_JSON_REPR_INDENT,
                      sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
#
def __repr__(self):
    """
    The method creates a JSON string representation of the internal class
    structure (not a dict: json.dumps returns str).

    :returns: A string representation of the internal class structure.
    :rtype: str
    """
    return json.dumps(self.to_dict(),
                      indent = constants.GLOBAL_JSON_REPR_INDENT,
                      sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
#
def __getitem__(self, data_layer_property_full_name):
    """
    A method to overload the default behaviour of the slice on this object to be an
    element from the data_layer_properties attribute.

    :param data_layer_property_full_name: The name of a Data Layer Property to search for, if this is
        numeric, the method simply returns the default (list order).
    :type data_layer_property_full_name: str
    :raises Exception: If less than one value is found,
        if more than one value is found.
    """
    if isinstance(data_layer_property_full_name, int):
        return self._data_layer_properties[data_layer_property_full_name]
    elif isinstance(data_layer_property_full_name, str):
        index_list = []
        foundCount = 0
        for index, data_layer_property in enumerate(self._data_layer_properties):
            # BUG FIX: the original nested the same equality test twice, so the
            # warning branch for an unnamed object was unreachable. Entries with
            # no full_name now log the intended warning instead.
            if data_layer_property.full_name is not None:
                if data_layer_property.full_name == data_layer_property_full_name:
                    foundCount = foundCount + 1
                    index_list.append(index)
            else:
                msg = messages.WARN_CATALOG_DATA_LAYER_PROPERTIES_OBJECT_NO_NAME.format(data_layer_property_full_name)
                logger.warning(msg)
        if foundCount == 0:
            msg = messages.ERROR_CATALOG_DATA_LAYER_PROPERTIES_NO_DATA_SET.format(data_layer_property_full_name)
            logger.error(msg)
            raise common.PAWException(msg)
        elif foundCount == 1:
            return self._data_layer_properties[index_list[0]]
        else:
            msg = messages.ERROR_CATALOG_DATA_LAYER_PROPERTIES_MULTIPLE_IDENTICAL_NAMES.format(data_layer_property_full_name)
            logger.error(msg)
            raise common.PAWException(msg)
    else:
        msg = messages.ERROR_CATALOG_DATA_LAYER_PROPERTIES_TYPE_UNKNOWN.format(type(data_layer_property_full_name))
        logger.error(msg)
        raise common.PAWException(msg)
#
def __init__(self,
             client: cl.Client = None,
             data_layer_properties: List[DataLayerProperty] = None,
             data_layer_id: str = None
            ):
    """
    Initialize a DataLayerProperties collection.

    :param client: An IBM PAIRS client; when None the global client is used.
    :type client: ibmpairs.client.Client
    :param data_layer_properties: A list of DataLayerProperty objects.
    :type data_layer_properties: List[DataLayerProperty]
    :param data_layer_id: The Data Layer ID the properties belong to.
    :type data_layer_id: str
    """
    # Resolve the client via common.set_client (explicit argument or the
    # global ibmpairs client).
    self._client = common.set_client(input_client = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    self._data_layer_properties = data_layer_properties
    self._data_layer_id = data_layer_id
#
def get_client(self):
    # Return the ibmpairs.client.Client held by this object.
    return self._client
#
def set_client(self, c):
    # Validate (common.check_class against cl.Client) before storing.
    self._client = common.check_class(c, cl.Client)
#
def del_client(self):
    del self._client
#
# Exposes _client through attribute-style access: obj.client
client = property(get_client, set_client, del_client)
#
def get_data_layer_properties(self):
    # Return the underlying list of DataLayerProperty objects.
    return self._data_layer_properties
#
def set_data_layer_properties(self, data_layer_properties):
    # NOTE(review): common.check_class is handed the typing generic
    # List[DataLayerProperty]; if it uses isinstance internally this would
    # raise TypeError on many Python versions — confirm against common.check_class.
    self._data_layer_properties = common.check_class(data_layer_properties, List[DataLayerProperty])
#
def del_data_layer_properties(self):
    del self._data_layer_properties
#
# Exposes _data_layer_properties through attribute-style access.
data_layer_properties = property(get_data_layer_properties, set_data_layer_properties, del_data_layer_properties)
#
def get_data_layer_id(self):
    # Return the Data Layer ID these properties belong to.
    return self._data_layer_id
#
def set_data_layer_id(self, data_layer_id):
    # Coerced/validated as a string via common.check_str.
    self._data_layer_id = common.check_str(data_layer_id)
#
def del_data_layer_id(self):
    del self._data_layer_id
#
# Exposes _data_layer_id through attribute-style access.
data_layer_id = property(get_data_layer_id, set_data_layer_id, del_data_layer_id)
#
def from_dict(data_layer_properties_input: Any):
    """
    Create a DataLayerProperties object from a dictionary (or a list of
    DataLayerProperty dictionaries).

    :param data_layer_properties_input: A dictionary that contains the keys of a DataLayerProperties, or a list of DataLayerProperty dictionaries.
    :type data_layer_properties_input: Any
    :rtype: ibmpairs.catalog.DataLayerProperties
    :raises Exception: If not a dictionary or a list.
    """
    data_layer_properties = None
    # Initialize so the value is defined even when the key is absent.
    data_layer_id = None
    if isinstance(data_layer_properties_input, dict):
        common.check_dict(data_layer_properties_input)
        if "data_layer_properties" in data_layer_properties_input:
            if data_layer_properties_input.get("data_layer_properties") is not None:
                data_layer_properties = common.from_list(data_layer_properties_input.get("data_layer_properties"), DataLayerProperty.from_dict)
        if "data_layer_id" in data_layer_properties_input:
            if data_layer_properties_input.get("data_layer_id") is not None:
                data_layer_id = common.check_str(data_layer_properties_input.get("data_layer_id"))
    elif isinstance(data_layer_properties_input, list):
        data_layer_properties = common.from_list(data_layer_properties_input, DataLayerProperty.from_dict)
    else:
        msg = messages.ERROR_CATALOG_DATA_LAYER_PROPERTIES_UNKNOWN.format(type(data_layer_properties_input))
        logger.error(msg)
        raise common.PAWException(msg)
    # BUG FIX: the parsed data_layer_id was previously discarded; pass it to
    # the constructor so round-tripping a dict preserves it.
    return DataLayerProperties(data_layer_properties = data_layer_properties,
                               data_layer_id = data_layer_id)
#
def to_dict(self):
    """
    Create a dictionary from the object's structure.

    :rtype: dict
    """
    result: dict = {}
    # Only keys whose backing attribute is populated are emitted.
    if self._data_layer_properties is not None:
        result["data_layer_properties"] = common.from_list(
            self._data_layer_properties,
            lambda prop: common.class_to_dict(prop, DataLayerProperty))
    if self._data_layer_id is not None:
        result["data_layer_id"] = self._data_layer_id
    return result
#
def from_json(data_layer_properties_json: Any):
"""
Create a DataLayerProperties object from json (dictonary or str).
:param data_layer_properties_dict: A json dictionary that contains the keys of a DataLayerProperties or a string representation of a json dictionary.
:type data_layer_properties_dict: Any
:rtype: ibmpairs.catalog.DataLayerProperties
:raises Exception: If not a dictionary or a string.
"""
if isinstance(data_layer_properties_json, dict):
data_layer_properties = DataLayerProperties.from_dict(data_layer_properties_json)
elif isinstance(data_layer_properties_json, str):
data_layer_properties_dict = json.loads(data_layer_properties_json)
data_layer_properties = DataLayerProperties.from_dict(data_layer_properties_dict)
else:
msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(data_layer_properties_json), "data_layer_properties_json")
logger.error(msg)
raise common.PAWException(msg)
return data_layer_properties
#
def to_json(self):
"""
Create a string representation of a json dictionary from the objects structure.
:rtype: string
"""
return json.dumps(self.to_dict())
#
def display(self,
columns: List[str] = ['id', 'short_name', 'identifier', 'order', 'full_name', 'type', 'unit'],
sort_by: str = 'id'
):
"""
A method to return a pandas.DataFrame object of get results.
:param columns: The columns to be returned in the pandas.DataFrame object, defaults to ['id', 'name', 'description_short', 'description_long']
:type columns: List[str]
:returns: A pandas.DataFrame of attributes from the data_layer_properties attribute.
:rtype: pandas.DataFrame
"""
display_df = None
for data_layer_property in self._data_layer_properties:
next_display = data_layer_property.display(columns)
if display_df is None:
display_df = next_display
else:
display_df = pd.concat([display_df, next_display])
display_df.reset_index(inplace=True, drop=True)
return display_df.sort_values(by=[sort_by])
#
def get(self,
data_layer_id = None,
client: cl.Client = None,
verify: bool = constants.GLOBAL_SSL_VERIFY
):
"""
A method to get a list of Data Layer Properties by Data Layer ID.
:param data_layer_id: The Data Layer ID of the Data Layer Properties to be gathered.
:type data_layer_id: str
:param client: An IBM PAIRS Client.
:type client: ibmpairs.client.Client
:param verify: SSL verification
:type verify: bool
:returns: A populated Data Layer Properties object.
:rtype: ibmpairs.catalog.DataLayerProperties
:raises Exception: A ibmpairs.client.Client is not found,
a Data Layer ID is not provided or already held in the object,
a server error occurred,
the status of the request is not 200.
"""
if data_layer_id is not None:
self._data_layer_id = common.check_str(data_layer_id)
if self._data_layer_id is None:
msg = messages.ERROR_CATALOG_DATA_LAYER_PROPERTIES_ID
logger.error(msg)
raise common.PAWException(msg)
cli = common.set_client(input_client = client,
global_client = cl.GLOBAL_PAIRS_CLIENT,
self_client = self._client)
try:
response = cli.get(url = cli.get_host() +
constants.CATALOG_DATA_LAYERS_API +
common.check_str(data_layer_id) +
constants.CATALOG_DATA_LAYERS_API_PROPERTIES,
verify = verify
)
except Exception as e:
msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(data_layer_id) + constants.CATALOG_DATA_LAYERS_API_PROPERTIES, e)
logger.error(msg)
raise common.PAWException(msg)
if response.status_code != 200:
error_message = 'failed'
msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(data_layer_id) + constants.CATALOG_DATA_LAYERS_API_PROPERTIES, response.status_code, error_message)
logger.error(msg)
raise common.PAWException(msg)
else:
data_layer_properties_get = DataLayerProperties.from_dict(response.json())
self._data_layer_properties = data_layer_properties_get.data_layer_properties
return data_layer_properties_get
#
class DataLayer:
#
#_client: cl.Client
# Common
#_name: str
#_description: str
#_name_alternate: str
#_rating: float
#_description_short: str
#_description_long: str
#_description_links: List[str]
#_data_source_name: str
#_data_source_attribution: str
#_data_source_description: str
#_data_source_links: List[str]
#_update_interval_max: str
#_update_interval_description: str
#_lag_horizon: str
#_lag_horizon_description: str
#_temporal_resolution: str
#_temporal_resolution_description: str
#_spatial_resolution_of_raw_data: str
#_interpolation: str
#_interpolation_upload: str
#_dimensions_description: str
#_permanence: bool
#_permanence_description: str
#_known_issues: str
#_properties: Properties
#_spatial_coverage: SpatialCoverage
#_latitude_min: float
#_longitude_min: float
#_latitude_max: float
#_longitude_max: float
#_temporal_min: str
#_temporal_max: str
#_measurement_interval: str
#_measurement_interval_description: str
#_meaning_of_timestamp: str
#_meaning_of_spatial_descriptor: str
#_data_layer_return: DataLayerReturn
# Get Exclusive
# (GET /v2/datalayers/{datalayer_id})
#_id: str
#_dataset: DataSet
#_created_at: str
#_updated_at: str
#_type: str
#_unit: str
#_dataset_id: str
# Create Exclusive
# (POST /v2/datasets/{dataset_id}/datalayers)
# N/A
# Update Exclusive
# (PUT /v2/datalayers/{datalayer_id})
# N/A
# Get & Update
# (GET /v2/datalayers/{datalayer_id})
# (GET /v2/datalayers/full)
#_min_value: float
#_max_value: float
# Create & Get Common
# (POST /v2/datasets/{dataset_id}/datalayers)
# (GET /v2/datalayers/{datalayer_id})
#_units: str
#_datatype: str
#_level: int
#_crs: str
#_color_table: ColorTable
# Create & Update Common
# (POST /v2/datasets/{dataset_id}/datalayers)
# (PUT /v2/datalayers/{datalayer_id})
#_description_internal: str
#_description_internal_links: List[str]
#_formula: str
# Internal
#_data_layer_response: DataLayerReturn
"""
    An object to represent an IBM PAIRS Data Layer.
:param client: An IBM PAIRS Client.
:type client: ibmpairs.client.Client
:param name: Data Layer name.
:type name: str
:param description: Data Layer description.
:type description: str
:param name_alternate: Alternative Data Layer name.
:type name_alternate: str
:param rating: Rating.
:type rating: float
:param description_short: Short description of the Layer Set.
:type description_short: str
:param description_long: Long description of the Layer Set.
:type description_long: str
:param description_links: A list of URLs with supporting documentation.
:type description_links: List[str]
:param data_source_name: A name for the origin data source.
:type data_source_name: str
:param data_source_attribution: An attribution for the origin data source.
:type data_source_attribution: str
:param data_source_description: A description of the origin data source.
:type data_source_description: str
:param data_source_links: A list of URLs with supporting documentation of the origin data source.
:type data_source_links: List[str]
:param update_interval_max: The maximum interval of an update to the Data Layer.
:type update_interval_max: str
:param update_interval_description: A description of the maximum update interval.
:type update_interval_description: str
:param lag_horizon: Lag horizon of the Data Layer.
:type lag_horizon: str
:param lag_horizon_description: Lag horizon description.
:type lag_horizon_description: str
:param temporal_resolution: The temporal resolution of the Data Layer.
:type temporal_resolution: str
:param temporal_resolution_description: A description of the temporal resolution.
:type temporal_resolution_description: str
:param spatial_resolution_of_raw_data: Spatial resolution of the raw data.
:type spatial_resolution_of_raw_data: str
:param interpolation: Interpolation.
:type interpolation: str
:param interpolation_upload: Interpolation on upload.
:type interpolation_upload: str
:param dimensions_description: A description of the dimensions.
:type dimensions_description: str
:param permanence: Permanence.
:type permanence: bool
:param permanence_description: A description of the permanence value.
:type permanence_description: str
:param known_issues: Known issues with the data.
:type known_issues: str
:param properties: A properties entry.
:type properties: ibmpairs.catalog.Properties
:param spatial_coverage: A spatial coverage entry.
:type spatial_coverage: ibmpairs.catalog.SpatialCoverage
:param latitude_min: The minimum latitude of the Data Set.
:type latitude_min: float
:param longitude_min: The minimum longitude of the Data Set.
:type longitude_min: float
:param latitude_max: The maximum latitude of the Data Set.
:type latitude_max: float
:param longitude_max: The maximum longitude of the Data Set.
:type longitude_max: float
:param temporal_min: The minimum temporal value of the Data Set.
:type temporal_min: str
:param temporal_max: The maximum temporal value of the Data Set.
:type temporal_max: str
:param measurement_interval: The measurement interval of the data.
:type measurement_interval: str
:param measurement_interval_description: A description of the measurement interval.
:type measurement_interval_description: str
:param meaning_of_timestamp: A description of the meaning of the timestamp value.
:type meaning_of_timestamp: str
:param meaning_of_spatial_descriptor: A description of the meaning of the spatial descriptor.
:type meaning_of_spatial_descriptor: str
:param id: The Data Layer ID.
:type id: str
:param dataset: The Data Set a Data Layer belongs to.
:type dataset: ibmpairs.catalog.DataSet
:param created_at: The date of creation.
:type created_at: str
:param updated_at: The last updated date.
:type updated_at: str
:param type: Type.
:type type: str
:param unit: Unit.
:type unit: str
:param dataset_id: The Data Set ID.
:type dataset_id: str
    :param min_value: The minimum value of the data in the Data Layer.
    :type min_value: float
    :param max_value: The maximum value of the data in the Data Layer.
    :type max_value: float
:param units: Units.
:type units: str
:param datatype: The data type of the Data Layer.
:type datatype: str
:param level: The default IBM PAIRS level for the Data Layer.
:type level: int
:param crs: CRS.
:type crs: str
:param color_table: A color table to apply to the Data Layer.
:type color_table: ibmpairs.catalog.ColorTable
:param description_internal: An internal description of the Data Layer.
:type description_internal: str
:param description_internal_links: A list of links that give context to the description internal.
:type description_internal_links: List[str]
:param formula: Formula.
:type formula: str
:param data_layer_response: A server response to a executed Data Layer method call.
:type data_layer_response: ibmpairs.catalog.DataLayerReturn
:raises Exception: An ibmpairs.client.Client is not found.
"""
#
    def __str__(self):
        """
        The method creates a string representation of the internal class structure.
        The output is a pretty-printed JSON document built from to_dict().

        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                    indent    = constants.GLOBAL_JSON_REPR_INDENT,
                    sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
#
    def __repr__(self):
        """
        The method creates a string representation of the internal class structure.
        NOTE(review): json.dumps returns a str, not a dict, so this behaves
        identically to __str__; the previous docstring claimed a dict return.

        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                    indent    = constants.GLOBAL_JSON_REPR_INDENT,
                    sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)
#
    def __init__(self,
                 client: cl.Client = None,
                 name: str = None,
                 description: str = None,
                 name_alternate: str = None,
                 rating: float = None,
                 description_short: str = None,
                 description_long: str = None,
                 description_links: List[str] = None,
                 data_source_name: str = None,
                 data_source_attribution: str = None,
                 data_source_description: str = None,
                 data_source_links: List[str] = None,
                 update_interval_max: str = None,
                 update_interval_description: str = None,
                 lag_horizon: str = None,
                 lag_horizon_description: str = None,
                 temporal_resolution: str = None,
                 temporal_resolution_description: str = None,
                 spatial_resolution_of_raw_data: str = None,
                 interpolation: str = None,
                 interpolation_upload: str = None,
                 dimensions_description: str = None,
                 permanence: bool = None,
                 permanence_description: str = None,
                 known_issues: str = None,
                 properties: Properties = None,
                 spatial_coverage: SpatialCoverage = None,
                 latitude_min: float = None,
                 longitude_min: float = None,
                 latitude_max: float = None,
                 longitude_max: float = None,
                 temporal_min: str = None,
                 temporal_max: str = None,
                 measurement_interval: str = None,
                 measurement_interval_description: str = None,
                 meaning_of_timestamp: str = None,
                 meaning_of_spatial_descriptor: str = None,
                 id: str = None,
                 dataset: DataSet = None,
                 created_at: str = None,
                 updated_at: str = None,
                 type: str = None,
                 unit: str = None,
                 dataset_id: str = None,
                 min_value: float = None,
                 max_value: float = None,
                 units: str = None,
                 datatype: str = None,
                 level: int = None,
                 crs: str = None,
                 color_table: ColorTable = None,
                 description_internal: str = None,
                 description_internal_links: List[str] = None,
                 formula: str = None,
                 data_layer_response: DataLayerReturn = None
                ):
        """
        Initialise a DataLayer. See the class docstring for the meaning of each
        parameter; every value is stored verbatim on the corresponding private
        attribute without validation (validation happens in the property setters).
        """
        # Resolve the client: an explicitly supplied client wins, otherwise
        # fall back to the global PAIRS client.
        self._client = common.set_client(input_client = client,
                                         global_client = cl.GLOBAL_PAIRS_CLIENT)
        self._name = name
        self._description = description
        self._name_alternate = name_alternate
        self._rating = rating
        self._description_short = description_short
        self._description_long = description_long
        self._description_links = description_links
        self._data_source_name = data_source_name
        self._data_source_attribution = data_source_attribution
        self._data_source_description = data_source_description
        self._data_source_links = data_source_links
        self._update_interval_max = update_interval_max
        self._update_interval_description = update_interval_description
        self._lag_horizon = lag_horizon
        self._lag_horizon_description = lag_horizon_description
        self._temporal_resolution = temporal_resolution
        self._temporal_resolution_description = temporal_resolution_description
        self._spatial_resolution_of_raw_data = spatial_resolution_of_raw_data
        self._interpolation = interpolation
        self._interpolation_upload = interpolation_upload
        self._dimensions_description = dimensions_description
        self._permanence = permanence
        self._permanence_description = permanence_description
        self._known_issues = known_issues
        self._properties = properties
        self._spatial_coverage = spatial_coverage
        self._latitude_min = latitude_min
        self._longitude_min = longitude_min
        self._latitude_max = latitude_max
        self._longitude_max = longitude_max
        self._temporal_min = temporal_min
        self._temporal_max = temporal_max
        self._measurement_interval = measurement_interval
        self._measurement_interval_description = measurement_interval_description
        self._meaning_of_timestamp = meaning_of_timestamp
        self._meaning_of_spatial_descriptor = meaning_of_spatial_descriptor
        self._id = id
        self._dataset = dataset
        self._created_at = created_at
        self._updated_at = updated_at
        self._type = type
        self._unit = unit
        self._dataset_id = dataset_id
        self._min_value = min_value
        self._max_value = max_value
        self._units = units
        self._datatype = datatype
        self._level = level
        self._crs = crs
        self._color_table = color_table
        self._description_internal = description_internal
        self._description_internal_links = description_internal_links
        self._formula = formula
        # Always hold a DataLayerReturn so callers can read the response
        # attribute without None checks.
        if data_layer_response is None:
            self._data_layer_response = DataLayerReturn()
        else:
            self._data_layer_response = data_layer_response
#
    # Accessors for the client attribute.
    def get_client(self):
        return self._client
    #
    def set_client(self, c):
        # Must be an ibmpairs.client.Client instance (checked by common.check_class).
        self._client = common.check_class(c, cl.Client)
    #
    def del_client(self):
        del self._client
    #
    client = property(get_client, set_client, del_client)
#
def get_name(self):
return self._name
#
def set_name(self, name):
self._name = common.check_str(name)
#
def del_name(self):
del self._name
#
name = property(get_name, set_name, del_name)
#
def get_description(self):
return self._description
#
def set_description(self, description):
self._description = common.check_str(description)
#
def del_description(self):
del self._description
#
description = property(get_description, set_description, del_description)
#
def get_name_alternate(self):
return self._name_alternate
#
def set_name_alternate(self, name_alternate):
self._name_alternate = common.check_str(name_alternate)
#
def del_name_alternate(self):
del self._name_alternate
#
name_alternate = property(get_name_alternate, set_name_alternate, del_name_alternate)
#
def get_rating(self):
return self._rating
#
def set_rating(self, rating):
self._rating = common.check_float(rating)
#
def del_rating(self):
del self._rating
#
rating = property(get_rating, set_rating, del_rating)
#
def get_description_short(self):
return self._description_short
#
def set_description_short(self, description_short):
self._description_short = common.check_str(description_short)
#
def del_description_short(self):
del self._description_short
#
description_short = property(get_description_short, set_description_short, del_description_short)
#
def get_description_long(self):
return self._description_long
#
def set_description_long(self, description_long):
self._description_long = common.check_str(description_long)
#
def del_description_long(self):
del self._description_long
#
description_long = property(get_description_long, set_description_long, del_description_long)
#
def get_description_links(self):
return self._description_links
#
def set_description_links(self, description_links):
self._description_links = common.check_class(description_links, List[str])
#
def del_description_links(self):
del self._description_links
#
description_links = property(get_description_links, set_description_links, del_description_links)
#
def get_data_source_name(self):
return self._data_source_name
#
def set_data_source_name(self, data_source_name):
self._data_source_name = common.check_str(data_source_name)
#
def del_data_source_name(self):
del self._data_source_name
#
data_source_name = property(get_data_source_name, set_data_source_name, del_data_source_name)
#
def get_data_source_attribution(self):
return self._data_source_attribution
#
def set_data_source_attribution(self, data_source_attribution):
self._data_source_attribution = common.check_str(data_source_attribution)
#
def del_data_source_attribution(self):
del self._data_source_attribution
#
data_source_attribution = property(get_data_source_attribution, set_data_source_attribution, del_data_source_attribution)
#
def get_data_source_description(self):
return self._data_source_description
#
def set_data_source_description(self, data_source_description):
self._data_source_description = common.check_str(data_source_description)
#
def del_data_source_description(self):
del self._data_source_description
#
data_source_description = property(get_data_source_description, set_data_source_description, del_data_source_description)
#
def get_data_source_links(self):
return self._data_source_links
#
def set_data_source_links(self, data_source_links):
self._data_source_links = common.check_class(data_source_links, List[str])
#
def del_data_source_links(self):
del self._data_source_links
#
data_source_links = property(get_data_source_links, set_data_source_links, del_data_source_links)
#
def get_update_interval_max(self):
return self._update_interval_max
#
def set_update_interval_max(self, update_interval_max):
self._update_interval_max = common.check_str(update_interval_max)
#
def del_update_interval_max(self):
del self._update_interval_max
#
update_interval_max = property(get_update_interval_max, set_update_interval_max, del_update_interval_max)
#
def get_update_interval_description(self):
return self._update_interval_description
#
def set_update_interval_description(self, update_interval_description):
self._update_interval_description = common.check_str(update_interval_description)
#
def del_update_interval_description(self):
del self._update_interval_description
#
update_interval_description = property(get_update_interval_description, set_update_interval_description, del_update_interval_description)
#
def get_lag_horizon(self):
return self._lag_horizon
#
def set_lag_horizon(self, lag_horizon):
self._lag_horizon = common.check_str(lag_horizon)
#
def del_lag_horizon(self):
del self._lag_horizon
#
lag_horizon = property(get_lag_horizon, set_lag_horizon, del_lag_horizon)
#
def get_lag_horizon_description(self):
return self._lag_horizon_description
#
def set_lag_horizon_description(self, lag_horizon_description):
self._lag_horizon_description = common.check_str(lag_horizon_description)
#
def del_lag_horizon_description(self):
del self._lag_horizon_description
#
lag_horizon_description = property(get_lag_horizon_description, set_lag_horizon_description, del_lag_horizon_description)
#
def get_temporal_resolution(self):
return self._temporal_resolution
#
def set_temporal_resolution(self, temporal_resolution):
self._temporal_resolution = common.check_str(temporal_resolution)
#
def del_temporal_resolution(self):
del self._temporal_resolution
#
temporal_resolution = property(get_temporal_resolution, set_temporal_resolution, del_temporal_resolution)
#
def get_temporal_resolution_description(self):
return self._temporal_resolution_description
#
def set_temporal_resolution_description(self, temporal_resolution_description):
self._temporal_resolution_description = common.check_str(temporal_resolution_description)
#
def del_temporal_resolution_description(self):
del self._temporal_resolution_description
#
temporal_resolution_description = property(get_temporal_resolution_description, set_temporal_resolution_description, del_temporal_resolution_description)
#
def get_spatial_resolution_of_raw_data(self):
return self._spatial_resolution_of_raw_data
#
def set_spatial_resolution_of_raw_data(self, spatial_resolution_of_raw_data):
self._spatial_resolution_of_raw_data = common.check_str(spatial_resolution_of_raw_data)
#
def del_spatial_resolution_of_raw_data(self):
del self._spatial_resolution_of_raw_data
#
spatial_resolution_of_raw_data = property(get_spatial_resolution_of_raw_data, set_spatial_resolution_of_raw_data, del_spatial_resolution_of_raw_data)
#
def get_interpolation(self):
return self._interpolation
#
def set_interpolation(self, interpolation):
self._interpolation = common.check_str(interpolation)
#
def del_interpolation(self):
del self._interpolation
#
interpolation = property(get_interpolation, set_interpolation, del_interpolation)
#
def get_interpolation_upload(self):
return self._interpolation_upload
#
def set_interpolation_upload(self, interpolation_upload):
self._interpolation_upload = common.check_str(interpolation_upload)
#
def del_interpolation_upload(self):
del self._interpolation_upload
#
interpolation_upload = property(get_interpolation_upload, set_interpolation_upload, del_interpolation_upload)
#
def get_dimensions_description(self):
return self._dimensions_description
#
def set_dimensions_description(self, dimensions_description):
self._dimensions_description = common.check_str(dimensions_description)
#
def del_dimensions_description(self):
del self._dimensions_description
#
dimensions_description = property(get_dimensions_description, set_dimensions_description, del_dimensions_description)
#
def get_permanence(self):
return self._permanence
#
def set_permanence(self, permanence):
self._permanence = common.check_bool(permanence)
#
def del_permanence(self):
del self._permanence
#
permanence = property(get_permanence, set_permanence, del_permanence)
#
def get_permanence_description(self):
return self._permanence_description
#
def set_permanence_description(self, permanence_description):
self._permanence_description = common.check_str(permanence_description)
#
def del_permanence_description(self):
del self._permanence_description
#
permanence_description = property(get_permanence_description, set_permanence_description, del_permanence_description)
#
def get_known_issues(self):
return self._known_issues
#
def set_known_issues(self, known_issues):
self._known_issues = common.check_str(known_issues)
#
def del_known_issues(self):
del self._known_issues
#
known_issues = property(get_known_issues, set_known_issues, del_known_issues)
#
def get_properties(self):
return self._properties
#
def set_properties(self, properties):
self._properties = common.check_class(properties, Properties)
#
def del_properties(self):
del self._properties
#
properties = property(get_properties, set_properties, del_properties)
#
def get_spatial_coverage(self):
return self._spatial_coverage
#
def set_spatial_coverage(self, spatial_coverage):
self._spatial_coverage = common.check_class(spatial_coverage, SpatialCoverage)
#
def del_spatial_coverage(self):
del self._spatial_coverage
#
spatial_coverage = property(get_spatial_coverage, set_spatial_coverage, del_spatial_coverage)
#
def get_latitude_min(self):
return self._latitude_min
#
def set_latitude_min(self, latitude_min):
self._latitude_min = common.check_float(latitude_min)
#
def del_latitude_min(self):
del self._latitude_min
#
latitude_min = property(get_latitude_min, set_latitude_min, del_latitude_min)
#
def get_longitude_min(self):
return self._longitude_min
#
def set_longitude_min(self, longitude_min):
self._longitude_min = common.check_float(longitude_min)
#
def del_longitude_min(self):
del self._longitude_min
#
longitude_min = property(get_longitude_min, set_longitude_min, del_longitude_min)
#
def get_latitude_max(self):
return self._latitude_max
#
def set_latitude_max(self, latitude_max):
self._latitude_max = common.check_float(latitude_max)
#
def del_latitude_max(self):
del self._latitude_max
#
latitude_max = property(get_latitude_max, set_latitude_max, del_latitude_max)
#
def get_longitude_max(self):
return self._longitude_max
#
def set_longitude_max(self, longitude_max):
self._longitude_max = common.check_float(longitude_max)
#
def del_longitude_max(self):
del self._longitude_max
#
longitude_max = property(get_longitude_max, set_longitude_max, del_longitude_max)
#
def get_temporal_min(self):
return self._temporal_min
#
def set_temporal_min(self, temporal_min):
self._temporal_min = common.check_str(temporal_min)
#
def del_temporal_min(self):
del self._temporal_min
#
temporal_min = property(get_temporal_min, set_temporal_min, del_temporal_min)
#
def get_temporal_max(self):
return self._temporal_max
#
def set_temporal_max(self, temporal_max):
self._temporal_max = common.check_str(temporal_max)
#
def del_temporal_max(self):
del self._temporal_max
#
temporal_max = property(get_temporal_max, set_temporal_max, del_temporal_max)
#
def get_measurement_interval(self):
return self._measurement_interval
#
def set_measurement_interval(self, measurement_interval):
self._measurement_interval = common.check_str(measurement_interval)
#
def del_measurement_interval(self):
del self._measurement_interval
#
measurement_interval = property(get_measurement_interval, set_measurement_interval, del_measurement_interval)
#
def get_measurement_interval_description(self):
return self._measurement_interval_description
#
def set_measurement_interval_description(self, measurement_interval_description):
self._measurement_interval_description = common.check_str(measurement_interval_description)
#
def del_measurement_interval_description(self):
del self._measurement_interval_description
#
measurement_interval_description = property(get_measurement_interval_description, set_measurement_interval_description, del_measurement_interval_description)
#
def get_meaning_of_timestamp(self):
return self._meaning_of_timestamp
#
def set_meaning_of_timestamp(self, meaning_of_timestamp):
self._meaning_of_timestamp = common.check_str(meaning_of_timestamp)
#
def del_meaning_of_timestamp(self):
del self._meaning_of_timestamp
#
meaning_of_timestamp = property(get_meaning_of_timestamp, set_meaning_of_timestamp, del_meaning_of_timestamp)
#
def get_meaning_of_spatial_descriptor(self):
return self._meaning_of_spatial_descriptor
#
def set_meaning_of_spatial_descriptor(self, meaning_of_spatial_descriptor):
self._meaning_of_spatial_descriptor = common.check_str(meaning_of_spatial_descriptor)
#
def del_meaning_of_spatial_descriptor(self):
del self._meaning_of_spatial_descriptor
#
meaning_of_spatial_descriptor = property(get_meaning_of_spatial_descriptor, set_meaning_of_spatial_descriptor, del_meaning_of_spatial_descriptor)
#
    # Accessors for the Data Layer ID.
    def get_id(self):
        return self._id
    #
    def set_id(self, id):
        # NOTE(review): the parameter and property shadow the builtin id();
        # kept as-is because the name is part of the public API.
        self._id = common.check_str(id)
    #
    def del_id(self):
        del self._id
    #
    id = property(get_id, set_id, del_id)
#
    # Accessors for the owning Data Set.
    def get_dataset(self):
        return self._dataset
    #
    def set_dataset(self, dataset):
        self._dataset = common.check_class(dataset, DataSet)
    #
    def del_dataset(self):
        del self._dataset
    # (separator restored for consistency with the other accessor quads)
    dataset = property(get_dataset, set_dataset, del_dataset)
#
def get_created_at(self):
return self._created_at
#
def set_created_at(self, created_at):
self._created_at = common.check_str(created_at)
#
def del_created_at(self):
del self._created_at
#
created_at = property(get_created_at, set_created_at, del_created_at)
#
def get_updated_at(self):
return self._updated_at
#
def set_updated_at(self, updated_at):
self._updated_at = common.check_str(updated_at)
#
def del_updated_at(self):
del self._updated_at
#
updated_at = property(get_updated_at, set_updated_at, del_updated_at)
#
    # Accessors for the type attribute.
    def get_type(self):
        return self._type
    #
    def set_type(self, type):
        # NOTE(review): the parameter and property shadow the builtin type();
        # kept as-is because the name is part of the public API.
        self._type = common.check_str(type)
    #
    def del_type(self):
        del self._type
    #
    type = property(get_type, set_type, del_type)
#
def get_unit(self):
return self._unit
#
def set_unit(self, unit):
self._unit = common.check_str(unit)
#
def del_unit(self):
del self._unit
#
unit = property(get_unit, set_unit, del_unit)
#
def get_dataset_id(self):
return self._dataset_id
#
def set_dataset_id(self, dataset_id):
self._dataset_id = common.check_str(dataset_id)
#
def del_dataset_id(self):
del self._dataset_id
#
dataset_id = property(get_dataset_id, set_dataset_id, del_dataset_id)
#
def get_min_value(self):
return self._min_value
#
def set_min_value(self, min_value):
self._min_value = common.check_float(min_value)
#
def del_min_value(self):
del self._min_value
#
min_value = property(get_min_value, set_min_value, del_min_value)
#
def get_max_value(self):
return self._max_value
#
def set_max_value(self, max_value):
self._max_value = common.check_float(max_value)
#
def del_max_value(self):
del self._max_value
#
max_value = property(get_max_value, set_max_value, del_max_value)
#
def get_units(self):
return self._units
#
def set_units(self, units):
self._units = common.check_str(units)
#
def del_units(self):
del self._units
#
units = property(get_units, set_units, del_units)
#
def get_datatype(self):
return self._datatype
#
def set_datatype(self, datatype):
self._datatype = common.check_str(datatype)
#
def del_datatype(self):
del self._datatype
#
datatype = property(get_datatype, set_datatype, del_datatype)
#
def get_level(self):
return self._level
#
def set_level(self, level):
self._level = common.check_int(level)
#
def del_level(self):
del self._level
#
level = property(get_level, set_level, del_level)
#
def get_crs(self):
return self._crs
#
def set_crs(self, crs):
self._crs = common.check_str(crs)
#
def del_crs(self):
del self._crs
#
crs = property(get_crs, set_crs, del_crs)
#
def get_color_table(self):
return self._color_table
#
def set_color_table(self, color_table):
self._color_table = common.check_class(color_table, ColorTable)
#
def del_color_table(self):
del self._color_table
#
color_table = property(get_color_table, set_color_table, del_color_table)
#
def get_description_internal(self):
return self._description_internal
#
def set_description_internal(self, description_internal):
self._description_internal = common.check_str(description_internal)
#
def del_description_internal(self):
del self._description_internal
#
description_internal = property(get_description_internal, set_description_internal, del_description_internal)
#
    def get_description_internal_links(self):
        """Return the stored internal description links."""
        return self._description_internal_links
    #
    def set_description_internal_links(self, description_internal_links):
        """Set the internal description links; must satisfy common.check_class(List[str])."""
        self._description_internal_links = common.check_class(description_internal_links, List[str])
    #
    def del_description_internal_links(self):
        """Remove the internal description links attribute from the instance."""
        del self._description_internal_links
    #
    # Managed attribute 'description_internal_links' (List[str]) wired to the accessors above.
    description_internal_links = property(get_description_internal_links, set_description_internal_links, del_description_internal_links)
    #
    def get_formula(self):
        """Return the stored formula."""
        return self._formula
    #
    def set_formula(self, formula):
        """Set the formula; validated/coerced by common.check_str."""
        self._formula = common.check_str(formula)
    #
    def del_formula(self):
        """Remove the formula attribute from the instance."""
        del self._formula
    #
    # Managed attribute 'formula' (str) wired to the accessors above.
    formula = property(get_formula, set_formula, del_formula)
    #
    def get_data_layer_response(self):
        """Return the stored DataLayerReturn response object."""
        return self._data_layer_response
    #
    def set_data_layer_response(self, data_layer_response):
        """Set the response object; must be a DataLayerReturn instance (common.check_class)."""
        self._data_layer_response = common.check_class(data_layer_response, DataLayerReturn)
    #
    def del_data_layer_response(self):
        """Remove the response attribute from the instance."""
        del self._data_layer_response
    #
    # Managed attribute 'data_layer_response' (DataLayerReturn) wired to the accessors above.
    data_layer_response = property(get_data_layer_response, set_data_layer_response, del_data_layer_response)
    #
def from_dict(data_layer_dict: Any):
"""
Create a DataLayer object from a dictionary.
:param data_layer_dict: A dictionary that contains the keys of a DataLayer.
:type data_layer_dict: Any
:rtype: ibmpairs.catalog.DataLayer
:raises Exception: if not a dictionary.
"""
name = None
description = None
name_alternate = None
rating = None
description_short = None
description_long = None
description_links = None
data_source_name = None
data_source_attribution = None
data_source_description = None
data_source_links = None
update_interval_max = None
update_interval_description = None
lag_horizon = None
lag_horizon_description = None
temporal_resolution = None
temporal_resolution_description = None
spatial_resolution_of_raw_data = None
interpolation = None
interpolation_upload = None
dimensions_description = None
permanence = None
permanence_description = None
known_issues = None
properties = None
spatial_coverage = None
latitude_min = None
longitude_min = None
latitude_max = None
longitude_max = None
temporal_min = None
temporal_max = None
measurement_interval = None
measurement_interval_description = None
meaning_of_timestamp = None
meaning_of_spatial_descriptor = None
id = None
dataset = None
created_at = None
updated_at = None
type = None
unit = None
dataset_id = None
min_value = None
max_value = None
units = None
datatype = None
level = None
crs = None
color_table = None
description_internal = None
description_internal_links = None
formula = None
data_layer_response = None
common.check_dict(data_layer_dict)
if "name" in data_layer_dict:
if data_layer_dict.get("name") is not None:
name = common.check_str(data_layer_dict.get("name"))
if "description" in data_layer_dict:
if data_layer_dict.get("description") is not None:
description = common.check_str(data_layer_dict.get("description"))
if "name_alternate" in data_layer_dict:
if data_layer_dict.get("name_alternate") is not None:
name_alternate = common.check_str(data_layer_dict.get("name_alternate"))
if "rating" in data_layer_dict:
if data_layer_dict.get("rating") is not None:
rating = common.check_float(data_layer_dict.get("rating"))
if "description_short" in data_layer_dict:
if data_layer_dict.get("description_short") is not None:
description_short = common.check_str(data_layer_dict.get("description_short"))
if "description_long" in data_layer_dict:
if data_layer_dict.get("description_long") is not None:
description_long = common.check_str(data_layer_dict.get("description_long"))
if "description_links" in data_layer_dict:
if data_layer_dict.get("description_links") is not None:
description_links = common.from_list(data_layer_dict.get("description_links"), common.check_str)
if "data_source_name" in data_layer_dict:
if data_layer_dict.get("data_source_name") is not None:
data_source_name = common.check_str(data_layer_dict.get("data_source_name"))
if "data_source_attribution" in data_layer_dict:
if data_layer_dict.get("data_source_attribution") is not None:
data_source_attribution = common.check_str(data_layer_dict.get("data_source_attribution"))
if "data_source_description" in data_layer_dict:
if data_layer_dict.get("data_source_description") is not None:
data_source_description = common.check_str(data_layer_dict.get("data_source_description"))
if "data_source_links" in data_layer_dict:
if data_layer_dict.get("data_source_links") is not None:
data_source_links = common.from_list(data_layer_dict.get("data_source_links"), common.check_str)
if "update_interval_max" in data_layer_dict:
if data_layer_dict.get("update_interval_max") is not None:
update_interval_max = common.check_str(data_layer_dict.get("update_interval_max"))
if "update_interval_description" in data_layer_dict:
if data_layer_dict.get("update_interval_description") is not None:
update_interval_description = common.check_str(data_layer_dict.get("update_interval_description"))
if "lag_horizon" in data_layer_dict:
if data_layer_dict.get("lag_horizon") is not None:
lag_horizon = common.check_str(data_layer_dict.get("lag_horizon"))
if "lag_horizon_description" in data_layer_dict:
if data_layer_dict.get("lag_horizon_description") is not None:
lag_horizon_description = common.check_str(data_layer_dict.get("lag_horizon_description"))
if "temporal_resolution" in data_layer_dict:
if data_layer_dict.get("temporal_resolution") is not None:
temporal_resolution = common.check_str(data_layer_dict.get("temporal_resolution"))
if "temporal_resolution_description" in data_layer_dict:
if data_layer_dict.get("temporal_resolution_description") is not None:
temporal_resolution_description = common.check_str(data_layer_dict.get("temporal_resolution_description"))
if "spatial_resolution_of_raw_data" in data_layer_dict:
if data_layer_dict.get("spatial_resolution_of_raw_data") is not None:
spatial_resolution_of_raw_data = common.check_str(data_layer_dict.get("spatial_resolution_of_raw_data"))
if "interpolation" in data_layer_dict:
if data_layer_dict.get("interpolation") is not None:
interpolation = common.check_str(data_layer_dict.get("interpolation"))
if "interpolation_upload" in data_layer_dict:
if data_layer_dict.get("interpolation_upload") is not None:
interpolation_upload = common.check_str(data_layer_dict.get("interpolation_upload"))
if "dimensions_description" in data_layer_dict:
if data_layer_dict.get("dimensions_description") is not None:
dimensions_description = common.check_str(data_layer_dict.get("dimensions_description"))
if "permanence" in data_layer_dict:
if data_layer_dict.get("permanence") is not None:
permanence = common.check_bool(data_layer_dict.get("permanence"))
if "permanence_description" in data_layer_dict:
if data_layer_dict.get("permanence_description") is not None:
permanence_description = common.check_str(data_layer_dict.get("permanence_description"))
if "known_issues" in data_layer_dict:
if data_layer_dict.get("known_issues") is not None:
known_issues = common.check_str(data_layer_dict.get("known_issues"))
if "properties" in data_layer_dict:
if data_layer_dict.get("properties") is not None:
properties = Properties.from_dict(data_layer_dict.get("properties"))
if "spatial_coverage" in data_layer_dict:
if data_layer_dict.get("spatial_coverage") is not None:
spatial_coverage = SpatialCoverage.from_dict(data_layer_dict.get("spatial_coverage"))
if "latitude_min" in data_layer_dict:
if data_layer_dict.get("latitude_min") is not None:
latitude_min = common.check_float(data_layer_dict.get("latitude_min"))
if "longitude_min" in data_layer_dict:
if data_layer_dict.get("longitude_min") is not None:
longitude_min = common.check_float(data_layer_dict.get("longitude_min"))
if "latitude_max" in data_layer_dict:
if data_layer_dict.get("latitude_max") is not None:
latitude_max = common.check_float(data_layer_dict.get("latitude_max"))
if "longitude_max" in data_layer_dict:
if data_layer_dict.get("longitude_max") is not None:
longitude_max = common.check_float(data_layer_dict.get("longitude_max"))
if "temporal_min" in data_layer_dict:
if data_layer_dict.get("temporal_min") is not None:
temporal_min = common.check_str(data_layer_dict.get("temporal_min"))
if "temporal_max" in data_layer_dict:
if data_layer_dict.get("temporal_max") is not None:
temporal_max = common.check_str(data_layer_dict.get("temporal_max"))
if "measurement_interval" in data_layer_dict:
if data_layer_dict.get("measurement_interval") is not None:
measurement_interval = common.check_str(data_layer_dict.get("measurement_interval"))
if "measurement_interval_description" in data_layer_dict:
if data_layer_dict.get("measurement_interval_description") is not None:
measurement_interval_description = common.check_str(data_layer_dict.get("measurement_interval_description"))
if "meaning_of_timestamp" in data_layer_dict:
if data_layer_dict.get("meaning_of_timestamp") is not None:
meaning_of_timestamp = common.check_str(data_layer_dict.get("meaning_of_timestamp"))
if "meaning_of_spatial_descriptor" in data_layer_dict:
if data_layer_dict.get("meaning_of_spatial_descriptor") is not None:
meaning_of_spatial_descriptor = common.check_str(data_layer_dict.get("meaning_of_spatial_descriptor"))
if "id" in data_layer_dict:
if data_layer_dict.get("id") is not None:
id = common.check_str(data_layer_dict.get("id"))
if "dataset" in data_layer_dict:
if data_layer_dict.get("dataset") is not None:
dataset = DataSet.from_dict(data_layer_dict.get("dataset"))
if "created_at" in data_layer_dict:
if data_layer_dict.get("created_at") is not None:
created_at = common.check_str(data_layer_dict.get("created_at"))
if "updated_at" in data_layer_dict:
if data_layer_dict.get("updated_at") is not None:
updated_at = common.check_str(data_layer_dict.get("updated_at"))
if "type" in data_layer_dict:
if data_layer_dict.get("type") is not None:
type = common.check_str(data_layer_dict.get("type"))
if "unit" in data_layer_dict:
if data_layer_dict.get("unit") is not None:
unit = common.check_str(data_layer_dict.get("unit"))
if "dataset_id" in data_layer_dict:
if data_layer_dict.get("dataset_id") is not None:
dataset_id = common.check_str(data_layer_dict.get("dataset_id"))
if "min_value" in data_layer_dict:
if data_layer_dict.get("min_value") is not None:
min_value = common.check_float(data_layer_dict.get("min_value"))
if "max_value" in data_layer_dict:
if data_layer_dict.get("max_value") is not None:
max_value = common.check_float(data_layer_dict.get("max_value"))
if "units" in data_layer_dict:
if data_layer_dict.get("units") is not None:
units = common.check_str(data_layer_dict.get("units"))
if "datatype" in data_layer_dict:
if data_layer_dict.get("datatype") is not None:
datatype = common.check_str(data_layer_dict.get("datatype"))
if "level" in data_layer_dict:
if data_layer_dict.get("level") is not None:
level = common.check_int(data_layer_dict.get("level"))
if "crs" in data_layer_dict:
if (data_layer_dict.get("crs") is not None):
crs = common.check_str(data_layer_dict.get("crs"))
if "colorTable" in data_layer_dict:
if data_layer_dict.get("colorTable") is not None:
color_table = ColorTable.from_dict(data_layer_dict.get("colorTable"))
elif "color_table" in data_layer_dict:
if data_layer_dict.get("color_table") is not None:
color_table = ColorTable.from_dict(data_layer_dict.get("color_table"))
if "description_internal" in data_layer_dict:
if data_layer_dict.get("description_internal") is not None:
description_internal = common.check_str(data_layer_dict.get("description_internal"))
if "description_internal_links" in data_layer_dict:
if data_layer_dict.get("description_internal_links") is not None:
description_internal_links = common.from_list(data_layer_dict.get("description_internal_links"), common.check_str)
if "formula" in data_layer_dict:
if data_layer_dict.get("formula") is not None:
formula = common.check_str(data_layer_dict.get("formula"))
if "data_layer_response" in data_layer_dict:
if data_layer_dict.get("data_layer_response") is not None:
data_layer_response = DataLayerReturn.from_dict(data_layer_dict.get("data_layer_response"))
return DataLayer(name = name,
description = description,
name_alternate = name_alternate,
rating = rating,
description_short = description_short,
description_long = description_long,
description_links = description_links,
data_source_name = data_source_name,
data_source_attribution = data_source_attribution,
data_source_description = data_source_description,
data_source_links = data_source_links,
update_interval_max = update_interval_max,
update_interval_description = update_interval_description,
lag_horizon = lag_horizon,
lag_horizon_description = lag_horizon_description,
temporal_resolution = temporal_resolution,
temporal_resolution_description = temporal_resolution_description,
spatial_resolution_of_raw_data = spatial_resolution_of_raw_data,
interpolation = interpolation,
interpolation_upload = interpolation_upload,
dimensions_description = dimensions_description,
permanence = permanence,
permanence_description = permanence_description,
known_issues = known_issues,
properties = properties,
spatial_coverage = spatial_coverage,
latitude_min = latitude_min,
longitude_min = longitude_min,
latitude_max = latitude_max,
longitude_max = longitude_max,
temporal_min = temporal_min,
temporal_max = temporal_max,
measurement_interval = measurement_interval,
measurement_interval_description = measurement_interval_description,
meaning_of_timestamp = meaning_of_timestamp,
meaning_of_spatial_descriptor = meaning_of_spatial_descriptor,
id = id,
dataset = dataset,
created_at = created_at,
updated_at = updated_at,
type = type,
unit = unit,
dataset_id = dataset_id,
min_value = min_value,
max_value = max_value,
units = units,
datatype = datatype,
level = level,
crs = crs,
color_table = color_table,
description_internal = description_internal,
description_internal_links = description_internal_links,
formula = formula,
data_layer_response = data_layer_response
)
#
def to_dict(self):
"""
Create a dictionary from the objects structure.
:rtype: dict
"""
data_layer_dict: dict = {}
if self._name is not None:
data_layer_dict["name"] = self._name
if self._description is not None:
data_layer_dict["description"] = self._description
if self._name_alternate is not None:
data_layer_dict["name_alternate"] = self._name_alternate
if self._rating is not None:
data_layer_dict["rating"] = self._rating
if self._description_short is not None:
data_layer_dict["description_short"] = self._description_short
if self._description_long is not None:
data_layer_dict["description_long"] = self._description_long
if self._description_links is not None:
data_layer_dict["description_links"] = common.from_list(self._description_links, common.check_str)
if self._data_source_name is not None:
data_layer_dict["data_source_name"] = self._data_source_name
if self._data_source_attribution is not None:
data_layer_dict["data_source_attribution"] = self._data_source_attribution
if self._data_source_description is not None:
data_layer_dict["data_source_description"] = self._data_source_description
if self._data_source_links is not None:
data_layer_dict["data_source_links"] = common.from_list(self._data_source_links, common.check_str)
if self._update_interval_max is not None:
data_layer_dict["update_interval_max"] = self._update_interval_max
if self._update_interval_description is not None:
data_layer_dict["update_interval_description"] = self._update_interval_description
if self._lag_horizon is not None:
data_layer_dict["lag_horizon"] = self._lag_horizon
if self._lag_horizon_description is not None:
data_layer_dict["lag_horizon_description"] = self._lag_horizon_description
if self._temporal_resolution is not None:
data_layer_dict["temporal_resolution"] = self._temporal_resolution
if self._temporal_resolution_description is not None:
data_layer_dict["temporal_resolution_description"] = self._temporal_resolution_description
if self._spatial_resolution_of_raw_data is not None:
data_layer_dict["spatial_resolution_of_raw_data"] = self._spatial_resolution_of_raw_data
if self._interpolation is not None:
data_layer_dict["interpolation"] = self._interpolation
if self._interpolation_upload is not None:
data_layer_dict["interpolation_upload"] = self._interpolation_upload
if self._dimensions_description is not None:
data_layer_dict["dimensions_description"] = self._dimensions_description
if self._permanence is not None:
data_layer_dict["permanence"] = self._permanence
if self._permanence_description is not None:
data_layer_dict["permanence_description"] = self._permanence_description
if self._known_issues is not None:
data_layer_dict["known_issues"] = self._known_issues
if self._properties is not None:
data_layer_dict["properties"] = common.class_to_dict(self._properties, Properties)
if self._spatial_coverage is not None:
data_layer_dict["spatial_coverage"] = common.class_to_dict(self._spatial_coverage, SpatialCoverage)
if self._latitude_min is not None:
data_layer_dict["latitude_min"] = self._latitude_min
if self._longitude_min is not None:
data_layer_dict["longitude_min"] = self._longitude_min
if self._latitude_max is not None:
data_layer_dict["latitude_max"] = self._latitude_max
if self._longitude_max is not None:
data_layer_dict["longitude_max"] = self._longitude_max
if self._temporal_min is not None:
data_layer_dict["temporal_min"] = self._temporal_min
if self._temporal_max is not None:
data_layer_dict["temporal_max"] = self._temporal_max
if self._measurement_interval is not None:
data_layer_dict["measurement_interval"] = self._measurement_interval
if self._measurement_interval_description is not None:
data_layer_dict["measurement_interval_description"] = self._measurement_interval_description
if self._meaning_of_timestamp is not None:
data_layer_dict["meaning_of_timestamp"] = self._meaning_of_timestamp
if self._meaning_of_spatial_descriptor is not None:
data_layer_dict["meaning_of_spatial_descriptor"] = self._meaning_of_spatial_descriptor
if self._id is not None:
data_layer_dict["id"] = self._id
if self._dataset is not None:
data_layer_dict["dataset"] = common.class_to_dict(self._dataset, DataSet)
if self._created_at is not None:
data_layer_dict["created_at"] = self._created_at
if self._updated_at is not None:
data_layer_dict["updated_at"] = self._updated_at
if self._type is not None:
data_layer_dict["type"] = self._type
if self._unit is not None:
data_layer_dict["unit"] = self._unit
if self._dataset_id is not None:
data_layer_dict["dataset_id"] = self._dataset_id
if self._min_value is not None:
data_layer_dict["min_value"] = self._min_value
if self._max_value is not None:
data_layer_dict["max_value"] = self._max_value
if self._units is not None:
data_layer_dict["units"] = self._units
if self._datatype is not None:
data_layer_dict["datatype"] = self._datatype
if self._level is not None:
data_layer_dict["level"] = self._level
if self._crs is not None:
data_layer_dict["crs"] = self._crs
if self._color_table is not None:
data_layer_dict["color_table"] = common.class_to_dict(self._color_table, ColorTable)
if self._description_internal is not None:
data_layer_dict["description_internal"] = self._description_internal
if self._description_internal_links is not None:
data_layer_dict["description_internal_links"] = common.from_list(self._description_internal_links, common.check_str)
if self._formula is not None:
data_layer_dict["formula"] = self._formula
if self._data_layer_response is not None:
data_layer_dict["data_layer_response"] = common.class_to_dict(self._data_layer_response, DataLayerReturn)
return data_layer_dict
#
def to_dict_data_layer_post(self):
"""
Create a dictionary from the objects structure ready for a POST operation.
:rtype: dict
"""
data_layer_dict: dict = {}
if self._name is not None:
data_layer_dict["name"] = self._name
if self._description is not None:
data_layer_dict["description"] = self._description
if self._name_alternate is not None:
data_layer_dict["name_alternate"] = self._name_alternate
if self._rating is not None:
data_layer_dict["rating"] = self._rating
if self._description_short is not None:
data_layer_dict["description_short"] = self._description_short
if self._description_long is not None:
data_layer_dict["description_long"] = self._description_long
if self._description_links is not None:
data_layer_dict["description_links"] = common.from_list(self._description_links, common.check_str)
if self._data_source_name is not None:
data_layer_dict["data_source_name"] = self._data_source_name
if self._data_source_attribution is not None:
data_layer_dict["data_source_attribution"] = self._data_source_attribution
if self._data_source_description is not None:
data_layer_dict["data_source_description"] = self._data_source_description
if self._data_source_links is not None:
data_layer_dict["data_source_links"] = common.from_list(self._data_source_links, common.check_str)
if self._update_interval_max is not None:
data_layer_dict["update_interval_max"] = self._update_interval_max
if self._update_interval_description is not None:
data_layer_dict["update_interval_description"] = self._update_interval_description
if self._lag_horizon is not None:
data_layer_dict["lag_horizon"] = self._lag_horizon
if self._lag_horizon_description is not None:
data_layer_dict["lag_horizon_description"] = self._lag_horizon_description
if self._temporal_resolution is not None:
data_layer_dict["temporal_resolution"] = self._temporal_resolution
if self._temporal_resolution_description is not None:
data_layer_dict["temporal_resolution_description"] = self._temporal_resolution_description
if self._spatial_resolution_of_raw_data is not None:
data_layer_dict["spatial_resolution_of_raw_data"] = self._spatial_resolution_of_raw_data
if self._interpolation is not None:
data_layer_dict["interpolation"] = self._interpolation
if self._interpolation_upload is not None:
data_layer_dict["interpolation_upload"] = self._interpolation_upload
if self._dimensions_description is not None:
data_layer_dict["dimensions_description"] = self._dimensions_description
if self._permanence is not None:
data_layer_dict["permanence"] = self._permanence
if self._permanence_description is not None:
data_layer_dict["permanence_description"] = self._permanence_description
if self._known_issues is not None:
data_layer_dict["known_issues"] = self._known_issues
if self._properties is not None:
data_layer_dict["properties"] = common.class_to_dict(self._properties, Properties)
if self._spatial_coverage is not None:
data_layer_dict["spatial_coverage"] = common.class_to_dict(self._spatial_coverage, SpatialCoverage)
if self._latitude_min is not None:
data_layer_dict["latitude_min"] = self._latitude_min
if self._longitude_min is not None:
data_layer_dict["longitude_min"] = self._longitude_min
if self._latitude_max is not None:
data_layer_dict["latitude_max"] = self._latitude_max
if self._longitude_max is not None:
data_layer_dict["longitude_max"] = self._longitude_max
if self._temporal_min is not None:
data_layer_dict["temporal_min"] = self._temporal_min
if self._temporal_max is not None:
data_layer_dict["temporal_max"] = self._temporal_max
if self._measurement_interval is not None:
data_layer_dict["measurement_interval"] = self._measurement_interval
if self._measurement_interval_description is not None:
data_layer_dict["measurement_interval_description"] = self._measurement_interval_description
if self._meaning_of_timestamp is not None:
data_layer_dict["meaning_of_timestamp"] = self._meaning_of_timestamp
if self._meaning_of_spatial_descriptor is not None:
data_layer_dict["meaning_of_spatial_descriptor"] = self._meaning_of_spatial_descriptor
if self._units is not None:
data_layer_dict["units"] = self._units
if self._datatype is not None:
data_layer_dict["datatype"] = self._datatype
if self._level is not None:
data_layer_dict["level"] = self._level
if self._crs is not None:
data_layer_dict["crs"] = self._crs
if self._color_table is not None:
data_layer_dict["colorTable"] = common.class_to_dict(self._color_table, ColorTable)
if self._description_internal is not None:
data_layer_dict["description_internal"] = self._description_internal
if self._description_internal_links is not None:
data_layer_dict["description_internal_links"] = common.from_list(self._description_internal_links, common.check_str)
if self._formula is not None:
data_layer_dict["formula"] = self._formula
return data_layer_dict
#
def to_dict_data_layer_put(self):
"""
Create a dictionary from the objects structure ready for a PUT operation.
:rtype: dict
"""
data_layer_dict: dict = {}
if self._name is not None:
data_layer_dict["name"] = self._name
if self._description is not None:
data_layer_dict["description"] = self._description
if self._name_alternate is not None:
data_layer_dict["name_alternate"] = self._name_alternate
if self._rating is not None:
data_layer_dict["rating"] = self._rating
if self._description_short is not None:
data_layer_dict["description_short"] = self._description_short
if self._description_long is not None:
data_layer_dict["description_long"] = self._description_long
if self._description_links is not None:
data_layer_dict["description_links"] = common.from_list(self._description_links, common.check_str)
if self._data_source_name is not None:
data_layer_dict["data_source_name"] = self._data_source_name
if self._data_source_attribution is not None:
data_layer_dict["data_source_attribution"] = self._data_source_attribution
if self._data_source_description is not None:
data_layer_dict["data_source_description"] = self._data_source_description
if self._data_source_links is not None:
data_layer_dict["data_source_links"] = common.from_list(self._data_source_links, common.check_str)
if self._update_interval_max is not None:
data_layer_dict["update_interval_max"] = self._update_interval_max
if self._update_interval_description is not None:
data_layer_dict["update_interval_description"] = self._update_interval_description
if self._lag_horizon is not None:
data_layer_dict["lag_horizon"] = self._lag_horizon
if self._lag_horizon_description is not None:
data_layer_dict["lag_horizon_description"] = self._lag_horizon_description
if self._temporal_resolution is not None:
data_layer_dict["temporal_resolution"] = self._temporal_resolution
if self._temporal_resolution_description is not None:
data_layer_dict["temporal_resolution_description"] = self._temporal_resolution_description
if self._spatial_resolution_of_raw_data is not None:
data_layer_dict["spatial_resolution_of_raw_data"] = self._spatial_resolution_of_raw_data
if self._interpolation is not None:
data_layer_dict["interpolation"] = self._interpolation
if self._interpolation_upload is not None:
data_layer_dict["interpolation_upload"] = self._interpolation_upload
if self._dimensions_description is not None:
data_layer_dict["dimensions_description"] = self._dimensions_description
if self._permanence is not None:
data_layer_dict["permanence"] = self._permanence
if self._permanence_description is not None:
data_layer_dict["permanence_description"] = self._permanence_description
if self._known_issues is not None:
data_layer_dict["known_issues"] = self._known_issues
if self._properties is not None:
data_layer_dict["properties"] = common.class_to_dict(self._properties, Properties)
if self._spatial_coverage is not None:
data_layer_dict["spatial_coverage"] = common.class_to_dict(self._spatial_coverage, SpatialCoverage)
if self._latitude_min is not None:
data_layer_dict["latitude_min"] = self._latitude_min
if self._longitude_min is not None:
data_layer_dict["longitude_min"] = self._longitude_min
if self._latitude_max is not None:
data_layer_dict["latitude_max"] = self._latitude_max
if self._longitude_max is not None:
data_layer_dict["longitude_max"] = self._longitude_max
if self._temporal_min is not None:
data_layer_dict["temporal_min"] = self._temporal_min
if self._temporal_max is not None:
data_layer_dict["temporal_max"] = self._temporal_max
if self._measurement_interval is not None:
data_layer_dict["measurement_interval"] = self._measurement_interval
if self._measurement_interval_description is not None:
data_layer_dict["measurement_interval_description"] = self._measurement_interval_description
if self._meaning_of_timestamp is not None:
data_layer_dict["meaning_of_timestamp"] = self._meaning_of_timestamp
if self._meaning_of_spatial_descriptor is not None:
data_layer_dict["meaning_of_spatial_descriptor"] = self._meaning_of_spatial_descriptor
if self._min_value is not None:
data_layer_dict["min_value"] = self._min_value
if self._max_value is not None:
data_layer_dict["max_value"] = self._max_value
if self._description_internal is not None:
data_layer_dict["description_internal"] = self._description_internal
if self._description_internal_links is not None:
data_layer_dict["description_internal_links"] = common.from_list(self._description_internal_links, common.check_str)
if self._formula is not None:
data_layer_dict["formula"] = self._formula
return data_layer_dict
#
def from_json(data_layer_json: Any):
"""
Create a DataLayer object from json (dictonary or str).
:param data_layer_dict: A json dictionary that contains the keys of a DataLayer or a string representation of a json dictionary.
:type data_layer_dict: Any
:rtype: ibmpairs.catalog.DataLayer
:raises Exception: If not a dictionary or a string.
"""
if isinstance(data_layer_json, dict):
data_layer = DataLayer.from_dict(data_layer_json)
elif isinstance(data_layer_json, str):
data_layer_dict = json.loads(data_layer_json)
data_layer = DataLayer.from_dict(data_layer_dict)
else:
msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(data_layer_json), "data_layer_json")
logger.error(msg)
raise common.PAWException(msg)
return data_layer
#
def to_json(self):
"""
Create a string representation of a json dictionary from the objects structure.
:rtype: string
"""
return json.dumps(self.to_dict())
#
def to_json_data_layer_post(self):
"""
Create a string representation of a json dictionary from the objects structure ready for a POST operation.
:rtype: string
"""
return json.dumps(self.to_dict_data_layer_post())
#
def to_json_data_layer_put(self):
"""
Create a string representation of a json dictionary from the objects structure ready for a PUT operation.
:rtype: string
"""
return json.dumps(self.to_dict_data_layer_put())
#
def display(self,
columns: List[str] = ['dataset_id', 'id', 'name', 'description_short', 'description_long', 'level', 'type', 'unit']
):
"""
A method to return a pandas.DataFrame object of a get result.
:param columns: The columns to be returned in the pandas.DataFrame object, defaults to ['dataset_id', 'id', 'name', 'description_short', 'description_long', 'level', 'type', 'unit']
:type columns: List[str]
:returns: A pandas.DataFrame of attributes from the object.
:rtype: pandas.DataFrame
"""
display_dict = self.to_dict()
display_df = pd.DataFrame([display_dict], columns=columns)
if 'type' in columns:
display_df["type"] = display_df["type"].map(lambda x: "Raster" if "R" in str(x) else "Vector" if "V" in str(x) else str(x))
return display_df
#
def get(self,
id = None,
client: cl.Client = None,
verify: bool = constants.GLOBAL_SSL_VERIFY
):
"""
A method to get a Data Layer.
:param id: The Data Layer ID of the Data Layer to be gathered.
:type id: str
:param client: An IBM PAIRS Client.
:type client: ibmpairs.client.Client
:param verify: SSL verification
:type verify: bool
:returns: A populated DataLayer object.
:rtype: ibmpairs.catalog.DataLayer
:raises Exception: A ibmpairs.client.Client is not found,
an ID is not provided or already held in the object,
a server error occurred,
the status of the request is not 200.
"""
if id is not None:
self._id = common.check_str(id)
if self._id is None:
msg = messages.ERROR_CATALOG_DATA_LAYER_ID
logger.error(msg)
raise common.PAWException(msg)
cli = common.set_client(input_client = client,
global_client = cl.GLOBAL_PAIRS_CLIENT,
self_client = self._client)
try:
response = cli.get(url = cli.get_host() +
constants.CATALOG_DATA_LAYERS_API +
common.check_str(self._id),
verify = verify
)
except Exception as e:
msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(self._id), e)
logger.error(msg)
raise common.PAWException(msg)
if response.status_code != 200:
error_message = 'failed'
msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(self._id), response.status_code, error_message)
logger.error(msg)
raise common.PAWException(msg)
else:
data_layer_get = DataLayer.from_dict(response.json())
return data_layer_get
#
def create(self,
data_set_id: str,
data_layer_type: str,
data_layer_group: str = None,
client: cl.Client = None,
verify: bool = constants.GLOBAL_SSL_VERIFY
):
"""
A method to create a Data Layer.
:param data_set_id: The Data Set ID of the Data Layer should be created for.
:type data_set_id: str
:param data_layer_type: The Data Layer type to be created, (e.g. 2draster).
:type data_layer_type: str
:param data_layer_group: In the case of vector data, the P group number the Data Layer
should be created within.
:type data_layer_group: str
:param client: An IBM PAIRS Client.
:type client: ibmpairs.client.Client
:param verify: SSL verification
:type verify: bool
:raises Exception: A ibmpairs.client.Client is not found,
a Data Set ID is not provided,
a Data Layer type is not provided,
a Data Layer group is not provided and the type is a Vector,
a server error occurred,
the status of the request is not 200.
"""
cli = common.set_client(input_client = client,
global_client = cl.GLOBAL_PAIRS_CLIENT,
self_client = self._client)
dls = DataLayers(data_set_id = common.check_str(data_set_id),
group = data_layer_group,
layer_type = common.check_str(data_layer_type),
data_layers = [self],
client = cli
)
dls.create()
return dls
#
def update(self,
id = None,
client: cl.Client = None,
verify: bool = constants.GLOBAL_SSL_VERIFY
):
"""
A method to update a Data Layer.
:param id: The Data Layer ID of the Data Layer to be updated.
:type id: str
:param client: An IBM PAIRS Client.
:type client: ibmpairs.client.Client
:param verify: SSL verification
:type verify: bool
:raises Exception: A ibmpairs.client.Client is not found,
an ID is not provided or already held in the object,
a server error occurred,
the status of the request is not 200.
"""
if id is not None:
self._id = common.check_str(id)
if self._id is None:
msg = messages.ERROR_CATALOG_DATA_LAYER_ID
logger.error(msg)
raise common.PAWException(msg)
cli = common.set_client(input_client = client,
global_client = cl.GLOBAL_PAIRS_CLIENT,
self_client = self._client)
data_layer_update_json = self.to_json_data_layer_put()
try:
response = cli.put(url = cli.get_host() +
constants.CATALOG_DATA_LAYERS_API +
common.check_str(self._id),
headers = constants.CLIENT_PUT_AND_POST_HEADER,
body = data_layer_update_json,
verify = verify
)
except Exception as e:
msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('PUT', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(self._id), e)
logger.error(msg)
raise common.PAWException(msg)
if response.status_code != 200:
error_message = 'failed'
if response.json() is not None:
try:
self._data_layer_response = data_layer_return_from_dict(response.json())
error_message = self._data_layer_response.message
except:
msg = messages.INFO_CATALOG_RESPOSE_NOT_SUCCESSFUL_NO_ERROR_MESSAGE
logger.info(msg)
msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('PUT', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(self._id), response.status_code, error_message)
logger.error(msg)
raise common.PAWException(msg)
else:
self._data_layer_response = data_layer_return_from_dict(response.json())
msg = messages.INFO_CATALOG_DATA_LAYER_UPDATE_SUCCESS.format(self._data_layer_response.data_layer_ids)
logger.info(msg)
# To ensure a user wishes to delete, the data layer id must be specified- this will not be pulled from the object.
def delete(self,
id = None,
hard_delete = False,
client: cl.Client = None,
verify: bool = constants.GLOBAL_SSL_VERIFY
):
"""
A method to delete a Data Layer.
:param id: The Data Layer ID of the Data Layer to be deleted.
:type id: str
:param hard_delete: Whether the Data Layer should be 'hard deleted', NOTE: this also deletes all data held by associated Data Layer. This step is necessary where the intention is to delete and recreate a Data Layer with the same name.
:type hard_delete: bool
:param client: An IBM PAIRS Client.
:type client: ibmpairs.client.Client
:param verify: SSL verification
:type verify: bool
:raises Exception: A ibmpairs.client.Client is not found,
an ID is not provided or already held in the object,
a server error occurred,
t he status of the request is not 200.
"""
cli = common.set_client(input_client = client,
global_client = cl.GLOBAL_PAIRS_CLIENT,
self_client = self._client)
if hard_delete is True:
url = cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(id) + "?hard_delete=true&force=true"
else:
url = cli.get_host() + constants.CATALOG_DATA_LAYERS_API + common.check_str(id)
try:
response = cli.delete(url = url,
verify = verify)
except Exception as e:
msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('DELETE', 'request', url, e)
logger.error(msg)
raise common.PAWException(msg)
if response.status_code != 200:
error_message = 'failed'
if response.json is not None:
try:
self._data_layer_response = data_layer_return_from_dict(response.json())
error_message = self._data_layer_response.message
except:
msg = messages.INFO_CATALOG_RESPOSE_NOT_SUCCESSFUL_NO_ERROR_MESSAGE
logger.info(msg)
msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('DELETE', 'request', url, response.status_code, error_message)
logger.error(msg)
raise common.PAWException(msg)
else:
self._data_layer_response = data_layer_return_from_dict(response.json())
msg = messages.INFO_CATALOG_DATA_LAYER_DELETE_SUCCESS.format(self._data_layer_response.id)
logger.info(msg)
#
def vector_layer_definition_from_file(self,
csv_file,
data_layer_type = None,
data_layer_group = None,
number_of_layer_columns = None
):
if os.path.isfile(os.path.join(os.getcwd(), csv_file)):
csv_file = os.path.join(os.getcwd(), csv_file)
elif os.path.isfile(csv_file):
csv_file = csv_file
else:
msg = messages.ERROR_CATALOG_VECTOR_DATA_LAYER_FROM_FILE_NOT_FOUND.format(csv_file)
logger.error(msg)
raise common.PAWException(msg)
layer_type = data_layer_type
type_map = {
'integer' : 'in',
'number': 'fl',
'string' : 'st'
}
res = []
m = pd.read_csv(csv_file)
res.append(m.columns.tolist())
for i in m.values.tolist():
res.append(i)
table = Table(res)
table.infer()
schema = table.schema
if layer_type.lower() not in ['vectorpoint', 'vectorpolygon']:
msg = messages.ERROR_CATALOG_VECTOR_DATA_LAYER_FROM_FILE_LAYER_TYPE.format(layer_type.lower())
logger.error(msg)
raise common.PAWException(msg)
if (schema.descriptor['fields'][0]['type']!='integer'):
msg = messages.ERROR_CATALOG_VECTOR_DATA_LAYER_FROM_FILE_INCORRECT_TYPE.format('first','integer','contain the timestamp epoch')
logger.error(msg)
raise common.PAWException(msg)
if (layer_type.lower() in ['vectorpolygon'] and schema.descriptor.fields[1].type != 'integer'):
msg = messages.ERROR_CATALOG_VECTOR_DATA_LAYER_FROM_FILE_INCORRECT_TYPE.format('second','integer','contain the ID of a polygon')
logger.error(msg)
raise common.PAWException(msg)
else:
if (schema.descriptor['fields'][1]['type']!='number' and schema.descriptor['fields'][2]['type']!='number'):
msg = messages.ERROR_CATALOG_VECTOR_DATA_LAYER_FROM_FILE_INCORRECT_TYPE.format('second and third','number','contain the latitude and longitude values')
logger.error(msg)
raise common.PAWException(msg)
layer_list = []
for field in schema.descriptor['fields'][3:(3+number_of_layer_columns)]:
layer_list.append({"name": field['name'],"datatype":type_map[field['type']], "units":"N/A"}),
payload = {"layerType": layer_type,
"group": data_layer_group,
"layers": layer_list
}
layers = data_layers_from_dict(payload)
return layers
    def raster_layer_definition_from_file(self,
                                          data_layer_name,
                                          filename
                                         ):
        """
        A method to infer a raster DataLayers definition from a georeferenced file.

        :param data_layer_name: The name to give the Data Layer in the definition.
        :type data_layer_name: str
        :param filename: The path of a raster file readable by rasterio (e.g. a GeoTIFF).
        :type filename: str
        :returns: A DataLayers definition (layerType 'Raster', a single layer) inferred from the file's dtype, CRS and resolution.
        :rtype: ibmpairs.catalog.DataLayers
        :raises Exception: If the rasterio package is not installed.
        """
        if HAS_RASTERIO:
            with rasterio.open(filename) as src:
                level = -1
                # 'xx' is a placeholder; it remains if the dtype is not one of
                # the cases handled below (e.g. float64, int32).
                datatype = 'xx'
                epsg_number = common.check_str(src.crs.to_epsg())
                # print (src.crs.to_epsg())
                # print (src.dtypes[0])
                # Map the first band's dtype to a PAIRS datatype code:
                # uint8 -> byte, uint16 -> integer, float16/32 -> float.
                if (src.dtypes[0] == 'uint8'):
                    datatype = 'bt'
                if (src.dtypes[0] == 'uint16'):
                    # arr = src.read(1)
                    # print("min:", arr.min())
                    # print("max:", arr.max())
                    # if (arr.max() - arr.min() < 256 )
                    print(
                        "Depending on the range of data in your dataset, you might be able to convert the tif to a byte datatype to save space and increase query speed?")
                    datatype = 'in'
                if (src.dtypes[0]== 'float16' or src.dtypes[0] == 'float32') :
                    datatype = 'fl'
                resolution = src.res[0]
                # print (resolution)
                # Extract the CRS unit name (e.g. 'metre"', 'degree"') from the WKT.
                x = re.findall("(?<=UNIT\[\").*?\"", src.crs.wkt)
                if "metre\"" in x:
                    # Find the first step finer than the file's resolution;
                    # level is its 1-based index in the step table.
                    # NOTE(review): assumes RASTER_METRE_STEPS /
                    # RASTER_DEGREE_STEPS are non-empty and sorted descending,
                    # and that some step is < resolution — otherwise `idx`
                    # lands on the last entry (or is unbound if the table is
                    # empty). TODO confirm against constants.
                    for idx, d in enumerate(constants.RASTER_METRE_STEPS):
                        if d < resolution:
                            break
                else:
                    for idx, d in enumerate(constants.RASTER_DEGREE_STEPS):
                        if d < resolution:
                            break
                level = common.check_str(idx + 1)
                definition = {
                    "layerType": "Raster",
                    "layers": [
                        {
                            "name": data_layer_name,
                            # colorTable 58 is used as the default palette.
                            "colorTable": {
                                "id": "58"
                            },
                            "crs": "EPSG:" + epsg_number,
                            "level": level,
                            "datatype": datatype
                        }
                    ]
                }
                layers = data_layers_from_dict(definition)
                return layers
        else:
            msg = messages.ERROR_NO_RASTERIO
            logger.error(msg)
            raise common.PAWException(msg)
#
class DataLayers:
    #
    #_client: cl.Client
    # Common
    #_data_set_id: str
    #_group: str
    #_group_id: str
    #_layer_type: str
    #_data_layers: List[DataLayer]
    #
    #_data_layer_response: DataLayerReturn
    """
    An object to represent a list of IBM PAIRS Data Layers.

    :param client: An IBM PAIRS Client.
    :type client: ibmpairs.client.Client
    :param data_set_id: The Data Set ID for the Data Layers.
    :type data_set_id: str
    :param group: The group name of the Data Layers.
    :type group: str
    :param group_id: The group ID of the Data Layers.
    :type group_id: str
    :param layer_type: The layer type (e.g. 2draster).
    :type layer_type: str
    :param data_layers: A list of Data Layers.
    :type data_layers: List[DataLayer]
    :param data_layer_response: A server response to a executed Data Layer method call.
    :type data_layer_response: ibmpairs.catalog.DataLayerReturn
    :raises Exception: An ibmpairs.client.Client is not found.
    """
    #
    def __str__(self):
        """
        The method creates a string representation of the internal class structure.

        :returns: A string representation of the internal class structure.
        :rtype: str
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)

    #
    def __repr__(self):
        """
        The method creates a dict representation of the internal class structure.

        :returns: A dict representation of the internal class structure.
        :rtype: dict
        """
        return json.dumps(self.to_dict(),
                          indent = constants.GLOBAL_JSON_REPR_INDENT,
                          sort_keys = constants.GLOBAL_JSON_REPR_SORT_KEYS)

    #
    def __getitem__(self, data_layer_name):
        """
        A method to overload the default behaviour of the slice on this object to be an
        element from the data_layers attribute.

        :param data_layer_name: The name of a Data Layer to search for, if this is numeric, the method simply returns the default (list order).
        :type data_layer_name: str
        :raises Exception: If less than one value is found,
                           if more than one value is found.
        """
        if isinstance(data_layer_name, int):
            return self._data_layers[data_layer_name]
        elif isinstance(data_layer_name, str):
            index_list = []
            index      = 0
            foundCount = 0

            for data_layer in self._data_layers:
                if data_layer.name is not None:
                    if (data_layer.name == data_layer_name):
                        foundCount = foundCount + 1
                        index_list.append(index)
                else:
                    msg = messages.WARN_CATALOG_DATA_LAYERS_DATA_SET_OBJECT_NO_NAME.format(data_layer_name)
                    logger.warning(msg)

                index = index + 1

            # The lookup is only unambiguous when exactly one layer matches.
            if foundCount == 0:
                msg = messages.ERROR_CATALOG_DATA_LAYERS_NO_DATA_SET.format(data_layer_name)
                logger.error(msg)
                raise common.PAWException(msg)
            elif foundCount == 1:
                return self._data_layers[index_list[0]]
            else:
                msg = messages.ERROR_CATALOG_DATA_LAYERS_MULTIPLE_IDENTICAL_NAMES.format(data_layer_name)
                logger.error(msg)
                raise common.PAWException(msg)
        else:
            msg = messages.ERROR_CATALOG_DATA_SETS_TYPE_UNKNOWN.format(type(data_layer_name))
            logger.error(msg)
            raise common.PAWException(msg)

    #
    def __init__(self,
                 client: cl.Client = None,
                 data_set_id: str = None,
                 group: str = None,
                 group_id: str = None,
                 layer_type: str = None,
                 data_layers: List[DataLayer] = None,
                 data_layer_response: DataLayerReturn = None,
                ):
        self._client = common.set_client(input_client = client,
                                         global_client = cl.GLOBAL_PAIRS_CLIENT)
        self._data_set_id = data_set_id
        self._group       = group
        self._group_id    = group_id
        self._layer_type  = layer_type
        self._data_layers = data_layers

        if data_layer_response is None:
            self._data_layer_response = DataLayerReturn()
        else:
            self._data_layer_response = data_layer_response

    #
    def get_client(self):
        return self._client

    #
    def set_client(self, c):
        self._client = common.check_class(c, cl.Client)

    #
    def del_client(self):
        del self._client

    #
    client = property(get_client, set_client, del_client)

    #
    def get_data_set_id(self):
        return self._data_set_id

    #
    def set_data_set_id(self, data_set_id):
        self._data_set_id = common.check_str(data_set_id)

    #
    def del_data_set_id(self):
        del self._data_set_id

    #
    data_set_id = property(get_data_set_id, set_data_set_id, del_data_set_id)

    #
    def get_group(self):
        return self._group

    #
    def set_group(self, group):
        self._group = common.check_str(group)

    #
    def del_group(self):
        del self._group

    #
    group = property(get_group, set_group, del_group)

    #
    def get_group_id(self):
        return self._group_id

    #
    def set_group_id(self, group_id):
        self._group_id = common.check_str(group_id)

    #
    def del_group_id(self):
        del self._group_id

    #
    group_id = property(get_group_id, set_group_id, del_group_id)

    #
    def get_layer_type(self):
        return self._layer_type

    #
    def set_layer_type(self, layer_type):
        self._layer_type = common.check_str(layer_type)

    #
    def del_layer_type(self):
        del self._layer_type

    #
    layer_type = property(get_layer_type, set_layer_type, del_layer_type)

    #
    def get_data_layers(self):
        return self._data_layers

    #
    def set_data_layers(self, data_layers):
        self._data_layers = common.check_class(data_layers, List[DataLayer])

    #
    def del_data_layers(self):
        del self._data_layers

    #
    data_layers = property(get_data_layers, set_data_layers, del_data_layers)

    #
    def get_data_layer_response(self):
        return self._data_layer_response

    #
    def set_data_layer_response(self, data_layer_response):
        self._data_layer_response = common.check_class(data_layer_response, DataLayerReturn)

    #
    def del_data_layer_response(self):
        del self._data_layer_response

    #
    data_layer_response = property(get_data_layer_response, set_data_layer_response, del_data_layer_response)

    # Note: intended to be called on the class, e.g. DataLayers.from_dict(...).
    def from_dict(data_layers_input: Any):
        """
        Create a DataLayers object from a dictionary.

        :param data_layers_input: A dictionary that contains the keys of a DataLayers.
        :type data_layers_input: Any
        :rtype: ibmpairs.catalog.DataLayers
        :raises Exception: If not a dictionary.
        """
        data_set_id         = None
        group               = None
        group_id            = None
        layer_type          = None
        data_layers         = None
        data_layer_response = None

        if isinstance(data_layers_input, dict):
            common.check_dict(data_layers_input)
            if "data_set_id" in data_layers_input:
                if data_layers_input.get("data_set_id") is not None:
                    data_set_id = common.check_str(data_layers_input.get("data_set_id"))
            if "group" in data_layers_input:
                if data_layers_input.get("group") is not None:
                    group = common.check_str(data_layers_input.get("group"))
            if "group_id" in data_layers_input:
                if data_layers_input.get("group_id") is not None:
                    group_id = common.check_str(data_layers_input.get("group_id"))
            # The server uses 'layerType'/'layers'; the internal representation
            # uses 'layer_type'/'data_layers' — accept both spellings.
            if "layerType" in data_layers_input:
                if data_layers_input.get("layerType") is not None:
                    layer_type = common.check_str(data_layers_input.get("layerType"))
            elif "layer_type" in data_layers_input:
                if data_layers_input.get("layer_type") is not None:
                    layer_type = common.check_str(data_layers_input.get("layer_type"))
            if "data_layers" in data_layers_input:
                if data_layers_input.get("data_layers") is not None:
                    data_layers = common.from_list(data_layers_input.get("data_layers"), DataLayer.from_dict)
            elif "layers" in data_layers_input:
                if data_layers_input.get("layers") is not None:
                    data_layers = common.from_list(data_layers_input.get("layers"), DataLayer.from_dict)
            if "data_layer_response" in data_layers_input:
                if data_layers_input.get("data_layer_response") is not None:
                    data_layer_response = DataLayerReturn.from_dict(data_layers_input.get("data_layer_response"))
        elif isinstance(data_layers_input, list):
            # A bare list is treated as the list of layers.
            data_layers = common.from_list(data_layers_input, DataLayer.from_dict)
        else:
            msg = messages.ERROR_CATALOG_DATA_LAYERS_UNKNOWN.format(type(data_layers_input))
            logger.error(msg)
            raise common.PAWException(msg)

        return DataLayers(data_set_id         = data_set_id,
                          group               = group,
                          group_id            = group_id,
                          layer_type          = layer_type,
                          data_layers         = data_layers,
                          data_layer_response = data_layer_response
                         )

    #
    def to_dict(self):
        """
        Create a dictionary from the objects structure.

        :rtype: dict
        """
        data_layers_dict: dict = {}
        if self._data_set_id is not None:
            data_layers_dict["data_set_id"] = self._data_set_id
        if self._group is not None:
            data_layers_dict["group"] = self._group
        if self._group_id is not None:
            data_layers_dict["group_id"] = self._group_id
        if self._layer_type is not None:
            data_layers_dict["layer_type"] = self._layer_type
        if self._data_layers is not None:
            data_layers_dict["data_layers"] = common.from_list(self._data_layers, lambda item: common.class_to_dict(item, DataLayer))
        if self._data_layer_response is not None:
            data_layers_dict["data_layer_response"] = common.class_to_dict(self._data_layer_response, DataLayerReturn)
        return data_layers_dict

    #
    def to_dict_data_layers_post(self):
        """
        Create a dictionary from the objects structure ready for a POST operation.

        :rtype: dict
        """
        data_layers_dict: dict = {}
        if self._group is not None:
            data_layers_dict["group"] = self._group
        if self._layer_type is not None:
            data_layers_dict["layerType"] = self._layer_type
        if self._data_layers is not None:
            data_layers_dict["layers"] = common.from_list(self._data_layers, lambda item: item.to_dict_data_layer_post())
        return data_layers_dict

    # Note: intended to be called on the class, e.g. DataLayers.from_json(...).
    def from_json(data_layers_json: Any):
        """
        Create a DataLayers object from json (dictonary or str).

        :param data_layers_json: A json dictionary that contains the keys of a DataLayers or a string representation of a json dictionary.
        :type data_layers_json: Any
        :rtype: ibmpairs.catalog.DataLayers
        :raises Exception: If not a dictionary or a string.
        """
        if isinstance(data_layers_json, dict):
            data_layers = DataLayers.from_dict(data_layers_json)
        elif isinstance(data_layers_json, str):
            data_layers_dict = json.loads(data_layers_json)
            data_layers = DataLayers.from_dict(data_layers_dict)
        else:
            msg = messages.ERROR_FROM_JSON_TYPE_NOT_RECOGNIZED.format(type(data_layers_json), "data_layers_json")
            logger.error(msg)
            raise common.PAWException(msg)
        return data_layers

    #
    def to_json(self):
        """
        Create a string representation of a json dictionary from the objects structure.

        :rtype: string
        """
        return json.dumps(self.to_dict())

    #
    def to_json_data_layers_post(self):
        """
        Create a string representation of a json dictionary from the objects structure ready for a POST operation.

        :rtype: string
        """
        return json.dumps(self.to_dict_data_layers_post())

    #
    def filter_data_layers_by_attribute(self,
                                        attribute,
                                        value,
                                        regex = None
                                       ):
        """
        A method to filter a list of Data Layers by an attribute.

        :param attribute: An attribute of a Data Layer.
        :type attribute: str
        :param value: A value to search for.
        :type value: str
        :param regex: A regex string to apply.
        :type regex: str
        :returns: A list of DataLayers that fit the criteria.
        :rtype: List[ibmpairs.catalog.DataLayer]
        :raises Exception: The value is not found in any Data Layer.
        """
        filtered_data_layers: List[DataLayer] = []

        for data_layer in self._data_layers:
            value_from_object = getattr(data_layer, attribute)
            if regex is None:
                value_to_compare = value_from_object
            else:
                value_regex = re.search(regex, value_from_object)
                if value_regex:
                    value_to_compare = value_regex.group(0)
                else:
                    # Fix: value_to_compare was previously left unbound when the
                    # regex did not match, causing an UnboundLocalError on the
                    # first iteration (or a stale comparison on later ones).
                    value_to_compare = None
            if value_to_compare is not None:
                if value_to_compare == value:
                    filtered_data_layers.append(data_layer)

        if len(filtered_data_layers) <= 0:
            msg = messages.ERROR_CATALOG_DATA_LAYERS_FILTER_DATA_LAYER_BY_ATTRIBUTE.format(attribute, value, common.check_str(regex))
            logger.error(msg)
            raise common.PAWException(msg)

        return filtered_data_layers

    #
    def display(self,
                columns: List[str] = ['dataset_id', 'id', 'name', 'description_short', 'description_long', 'level', 'type', 'unit'],
                sort_by: str = 'id'
               ):
        """
        A method to return a pandas.DataFrame object of get results.

        :param columns: The columns to be returned in the pandas.DataFrame object, defaults to ['dataset_id', 'id', 'name', 'description_short', 'description_long', 'level', 'type', 'unit']
        :type columns: List[str]
        :param sort_by: A sort_by column
        :type sort_by: str
        :returns: A pandas.DataFrame of attributes from the data_layers object.
        :rtype: pandas.DataFrame
        """
        display_df = None

        for data_layer in self._data_layers:
            next_display = data_layer.display(columns)
            if display_df is None:
                display_df = next_display
            else:
                display_df = pd.concat([display_df, next_display])

        display_df.reset_index(inplace=True, drop=True)
        # Fix: sort_values returns a new DataFrame; previously the sorted
        # result was discarded and the frame was returned unsorted.
        display_df = display_df.sort_values(by=[sort_by])

        return display_df

    #
    def get(self,
            data_set_id = None,
            data_layer_group_id: str = None,
            data_layer_group: str = None,
            client: cl.Client = None,
            verify: bool = constants.GLOBAL_SSL_VERIFY
           ):
        """
        A method to get a list of Data Layers, either all, or with specification of a
        Data Set ID, those for a Data Set.

        :param data_set_id: The Data Set ID to gather Data Layers for, if unspecified, the method gathers all Data Layers a user has access to.
        :type data_set_id: int or str
        :param data_layer_group_id: The Data Layer Group ID to filter the results on.
        :type data_layer_group_id: str
        :param data_layer_group: The Data Layer Group name to filter the results on.
        :type data_layer_group: str
        :param client: An IBM PAIRS Client.
        :type client: ibmpairs.client.Client
        :param verify: SSL verification
        :type verify: bool
        :returns: A populated DataLayers object.
        :rtype: ibmpairs.catalog.DataLayers
        :raises Exception: A ibmpairs.client.Client is not found,
                           if a Data Set ID is specified but could not be found,
                           a server error occurred,
                           the status of the request is not 200.
        """
        if data_set_id is not None:
            self._data_set_id = common.check_str(data_set_id)

        cli = common.set_client(input_client  = client,
                                global_client = cl.GLOBAL_PAIRS_CLIENT,
                                self_client   = self._client)

        # With a data set id: layers for that data set; without: all layers.
        if self._data_set_id is not None:
            try:
                response = cli.get(url = cli.get_host() +
                                         constants.CATALOG_DATA_SETS_API +
                                         common.check_str(self._data_set_id) +
                                         constants.CATALOG_DATA_SETS_LAYERS_API,
                                   verify = verify
                                  )
            except Exception as e:
                msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_SETS_API + common.check_str(self._data_set_id) + constants.CATALOG_DATA_SETS_LAYERS_API, e)
                logger.error(msg)
                raise common.PAWException(msg)
        else:
            try:
                response = cli.get(url = cli.get_host() +
                                         constants.CATALOG_DATA_LAYERS_API_FULL,
                                   verify = verify
                                  )
            except Exception as e:
                msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API_FULL, e)
                logger.error(msg)
                raise common.PAWException(msg)

        if response.status_code != 200:
            error_message = 'failed'

            if self._data_set_id is not None:
                msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_SETS_API + common.check_str(self._data_set_id) + constants.CATALOG_DATA_SETS_LAYERS_API, response.status_code, error_message)
            else:
                msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('GET', 'request', cli.get_host() + constants.CATALOG_DATA_LAYERS_API_FULL, response.status_code, error_message)

            logger.error(msg)
            raise common.PAWException(msg)
        else:
            data_layers_get = DataLayers.from_dict(response.json())
            self._data_layers = data_layers_get.data_layers

            # Optionally filter the fetched layers, either by group id (parsed
            # out of layer ids of the form ...P<group>C...) or by group name
            # (the prefix of the layer name up to the first '.').
            if data_layer_group_id is not None:
                self._data_layers = data_layers_get.filter_data_layers_by_attribute(attribute = 'id',
                                                                                    value     = data_layer_group_id,
                                                                                    regex     = "(?<=P)(.*?)(?=C)"
                                                                                   )
            elif (data_layer_group_id is None) and (data_layer_group is not None):
                self._data_layers = data_layers_get.filter_data_layers_by_attribute(attribute = 'name',
                                                                                    value     = data_layer_group,
                                                                                    regex     = ".+?(?=\.)"
                                                                                   )
            else:
                self._data_layers = data_layers_get.data_layers

        return self

    #
    def create(self,
               data_set_id: str = None,
               data_layer_group: str = None,
               data_layer_type: str = None,
               client: cl.Client = None,
               verify: bool = constants.GLOBAL_SSL_VERIFY
              ):
        """
        A method to create a number of Data Layers.

        :param data_set_id: The Data Set ID of the Data Layer should be created for.
        :type data_set_id: str
        :param data_layer_type: The Data Layer type to be created, (e.g. 2draster).
        :type data_layer_type: str
        :param data_layer_group: In the case of vector data, the P group number the Data Layer
                                 should be created within.
        :type data_layer_group: str
        :param client: An IBM PAIRS Client.
        :type client: ibmpairs.client.Client
        :param verify: SSL verification
        :type verify: bool
        :raises Exception: A ibmpairs.client.Client is not found,
                           a Data Set ID is not provided or set in the object,
                           a Data Layer type is not provided or set in the object,
                           a Data Layer group is not provided (or set in the object) and the type is a Vector,
                           a server error occurred,
                           the status of the request is not 200.
        """
        if data_set_id is not None:
            self._data_set_id = common.check_str(data_set_id)
        else:
            if self._data_set_id is None:
                msg = messages.ERROR_CATALOG_DATA_LAYERS_SET_ID
                logger.error(msg)
                raise common.PAWException(msg)

        if data_layer_type is not None:
            self._layer_type = data_layer_type
        else:
            if self._layer_type is None:
                msg = messages.ERROR_CATALOG_DATA_LAYERS_SET_LAYER_TYPE
                logger.error(msg)
                raise common.PAWException(msg)

        # Vector layers require a group; raster layers must not carry one.
        if self._layer_type.lower() in ['vectorpoint', 'vectorpolygon']:
            if data_layer_group is not None:
                self._group = data_layer_group
            if self._group is None:
                msg = messages.ERROR_CATALOG_DATA_LAYERS_NO_GROUP
                logger.error(msg)
                raise common.PAWException(msg)
        elif self._layer_type.lower() in ['raster']:
            self._group = None
        else:
            msg = messages.ERROR_CATALOG_DATA_LAYERS_TYPE_UNKNOWN.format(data_layer_type)
            logger.error(msg)
            raise common.PAWException(msg)

        cli = common.set_client(input_client  = client,
                                global_client = cl.GLOBAL_PAIRS_CLIENT,
                                self_client   = self._client)

        data_layer_create_json = self.to_json_data_layers_post()

        try:
            response = cli.post(url = cli.get_host() +
                                      constants.CATALOG_DATA_SETS_API +
                                      common.check_str(self._data_set_id) +
                                      constants.CATALOG_DATA_SETS_LAYERS_API,
                                headers = constants.CLIENT_PUT_AND_POST_HEADER,
                                body    = data_layer_create_json,
                                verify  = verify
                               )
        except Exception as e:
            msg = messages.ERROR_CLIENT_UNSPECIFIED_ERROR.format('POST', 'request', cli.get_host() + constants.CATALOG_DATA_SETS_API + common.check_str(self._data_set_id) + constants.CATALOG_DATA_SETS_LAYERS_API, e)
            logger.error(msg)
            raise common.PAWException(msg)

        if response.status_code != 200:
            error_message = 'failed'

            if response.json() is not None:
                try:
                    self._data_layer_response = data_layer_return_from_dict(response.json())
                    error_message = self._data_layer_response.message
                except:
                    msg = messages.INFO_CATALOG_RESPOSE_NOT_SUCCESSFUL_NO_ERROR_MESSAGE
                    logger.info(msg)

            # Fix: the error message previously omitted cli.get_host(), unlike
            # every other method in this class.
            msg = messages.ERROR_CATALOG_RESPOSE_NOT_SUCCESSFUL.format('POST', 'request', cli.get_host() + constants.CATALOG_DATA_SETS_API + common.check_str(self._data_set_id) + constants.CATALOG_DATA_SETS_LAYERS_API, response.status_code, error_message)
            logger.error(msg)
            raise common.PAWException(msg)
        else:
            self._data_layer_response = data_layer_return_from_dict(response.json())
            msg = messages.INFO_CATALOG_DATA_LAYERS_CREATE_SUCCESS.format(str(self._data_layer_response.data_layer_ids))
            logger.info(msg)
            # Refresh the object with the newly created layers.
            self.get(data_set_id = self._data_set_id)
            # Fix: guard the [0] index — previously an empty (or missing)
            # data_layer_ids list raised an IndexError after a successful POST.
            if self._data_layer_response.data_layer_ids:
                group_id_regex = re.search("(?<=P)(.*?)(?=C)", self._data_layer_response.data_layer_ids[0])
                if group_id_regex is not None:
                    self.set_group_id(common.check_str(group_id_regex.group(0)))
#
class Search:
    #_data_sets: DataSets
    #_data_layers: DataLayers
    """
    An object to search Data Sets and Data Layers for search terms.

    :param client: An IBM PAIRS Client.
    :type client: ibmpairs.client.Client
    :param data_sets: A list of Data Sets.
    :type data_sets: List[DataSet]
    :param data_layers: A list of Data Layers.
    :type data_layers: List[DataLayer]
    :raises Exception: An ibmpairs.client.Client is not found.
    """
    #
    def __init__(self,
                 client: cl.Client = None,
                 data_sets: DataSets = None,
                 data_layers: DataLayers = None
                ):
        self._client = common.set_client(input_client = client,
                                         global_client = cl.GLOBAL_PAIRS_CLIENT)
        self._data_sets   = data_sets
        self._data_layers = data_layers

    #
    def get_data_sets(self):
        return self._data_sets

    #
    def set_data_sets(self, data_sets):
        self._data_sets = common.check_class(data_sets, DataSets)

    #
    def del_data_sets(self):
        del self._data_sets

    #
    data_sets = property(get_data_sets, set_data_sets, del_data_sets)

    #
    def get_data_layers(self):
        return self._data_layers

    #
    def set_data_layers(self, data_layers):
        self._data_layers = common.check_class(data_layers, DataLayers)

    #
    def del_data_layers(self):
        del self._data_layers

    #
    data_layers = property(get_data_layers, set_data_layers, del_data_layers)

    #
    def get_catalog(self,
                    client: cl.Client = None,
                    verify: bool = constants.GLOBAL_SSL_VERIFY
                   ):
        """
        A method to get Data Sets and Data Layers to search.

        :param client: An IBM PAIRS Client.
        :type client: ibmpairs.client.Client
        :param verify: SSL verification
        :type verify: bool
        :returns: A pandas.DataFrame of merged Data Set and Data Layer information.
        :rtype: pandas.DataFrame
        :raises Exception: An ibmpairs.client.Client is not found.
        """
        cli = common.set_client(input_client  = client,
                                global_client = cl.GLOBAL_PAIRS_CLIENT,
                                self_client   = self._client)

        data_set_columns = ['id', 'name', 'description_short', 'description_long']

        # Lazily fetch and cache the full data set listing.
        if self._data_sets is not None:
            dso = self._data_sets
        else:
            dso = DataSets()
            dso.get(client = cli,
                    verify = verify)
            self._data_sets = dso

        ds = dso.display(columns = data_set_columns)
        ds.columns = ['data_set_' + x for x in ds.columns]
        #ds.index.names = ['dataset_id']

        data_layer_columns = ['dataset_id', 'id', 'name', 'description_short', 'description_long', 'level', 'type', 'unit']

        # Lazily fetch and cache the full data layer listing.
        if self._data_layers is not None:
            dlo = self._data_layers
        else:
            dlo = DataLayers()
            dlo.get(client = cli,
                    verify = verify)
            self._data_layers = dlo

        dl = dlo.display(columns = data_layer_columns)
        # 'dataset_id' is kept un-prefixed: it is the join key to the data sets.
        dl.columns = ['data_layer_' + x if x != 'dataset_id' else x for x in dl.columns]
        #dl.index.names = ['datalayer_id']

        catalog_merge = pd.merge(dl, ds, left_on = 'dataset_id', right_on = 'data_set_id', how = 'left')
        catalog_merge.reset_index(inplace=True, drop=True)

        return catalog_merge

    #
    def all(self,
            search_term: str,
            client: cl.Client = None,
            verify: bool = constants.GLOBAL_SSL_VERIFY
           ):
        """
        A method to search Data Sets and Data Layers.

        :param search_term: A search term to be used.
        :type search_term: str
        :param client: An IBM PAIRS Client.
        :type client: ibmpairs.client.Client
        :param verify: SSL verification
        :type verify: bool
        :returns: A pandas.DataFrame of matching searched Data Sets and Data Layers.
        :rtype: pandas.DataFrame
        :raises Exception: An ibmpairs.client.Client is not found.
        """
        cli = common.set_client(input_client  = client,
                                global_client = cl.GLOBAL_PAIRS_CLIENT,
                                self_client   = self._client)

        ds = self.data_sets(search_term = search_term,
                            client      = cli,
                            verify      = verify
                           )
        dl = self.data_layers(search_term = search_term,
                              client      = cli,
                              verify      = verify
                             )

        frames = [ds, dl]
        union = pd.concat(frames)
        # Fix: drop_duplicates(inplace=False) returns a new DataFrame; the
        # result was previously discarded, so duplicates were never removed.
        union = union.drop_duplicates(subset=None, keep='first')

        return union

    #
    def data_sets(self,
                  search_term: str,
                  client: cl.Client = None,
                  verify: bool = constants.GLOBAL_SSL_VERIFY
                 ):
        """
        A method to search Data Sets.

        :param search_term: A search term to be used.
        :type search_term: str
        :param client: An IBM PAIRS Client.
        :type client: ibmpairs.client.Client
        :param verify: SSL verification
        :type verify: bool
        :returns: A pandas.DataFrame of matching searched Data Sets.
        :rtype: pandas.DataFrame
        :raises Exception: An ibmpairs.client.Client is not found.
        """
        cli = common.set_client(input_client  = client,
                                global_client = cl.GLOBAL_PAIRS_CLIENT,
                                self_client   = self._client)

        ds = self.get_catalog(client = cli,
                              verify = verify
                             )
        ds = ds.fillna("")

        try:
            float(search_term) #check if searchterm is a number, if not search df for string
            search = ds.query('data_set_id ==' + search_term, engine='python')
        except:
            # Fix: the three conditions were previously separate Python strings
            # joined with a Python-level `or`, which evaluates to the first
            # string only — the description columns were never searched (and the
            # dead second string named a nonexistent column
            # 'dataset_description_short'). The `or` must live inside the single
            # pandas query expression.
            # NOTE(review): search_term is interpolated into the query string;
            # a term containing double quotes will break the expression.
            search = ds.query('data_set_name.str.contains("' + search_term + '")'
                              ' or data_set_description_short.str.contains("' + search_term + '")'
                              ' or data_set_description_long.str.contains("' + search_term + '")',
                              engine='python'
                             )

        search.reset_index(inplace=True, drop=True)

        return search

    #
    def data_layers(self,
                    search_term: str,
                    client: cl.Client = None,
                    verify: bool = constants.GLOBAL_SSL_VERIFY
                   ):
        """
        A method to search Data Layers.

        :param search_term: A search term to be used.
        :type search_term: str
        :param client: An IBM PAIRS Client.
        :type client: ibmpairs.client.Client
        :param verify: SSL verification
        :type verify: bool
        :returns: A pandas.DataFrame of matching searched Data Layers.
        :rtype: pandas.DataFrame
        :raises Exception: An ibmpairs.client.Client is not found.
        """
        cli = common.set_client(input_client  = client,
                                global_client = cl.GLOBAL_PAIRS_CLIENT,
                                self_client   = self._client)

        dl = self.get_catalog(client = cli,
                              verify = verify
                             )
        dl = dl.fillna("")

        try:
            float(search_term)
            search = dl.query('data_layer_id.str.contains("' + search_term + '")', engine='python')
        except:
            # Fix: as in data_sets(), the conditions must be combined inside a
            # single pandas query expression, not with a Python-level `or`.
            search = dl.query('data_layer_id.str.contains("' + search_term + '")'
                              ' or data_layer_name.str.contains("' + search_term + '")'
                              ' or data_layer_description_short.str.contains("' + search_term + '")'
                              ' or data_layer_description_long.str.contains("' + search_term + '")',
                              engine='python'
                             )

        search.reset_index(inplace=True, drop=True)

        return search
#
def category_from_dict(category_dictionary: dict):
    """
    Build a Category object from a dictionary.

    :param category_dictionary: A dictionary that contains the keys of a Category.
    :type category_dictionary: dict
    :rtype: ibmpairs.catalog.Category
    :raises Exception: If not a dict.
    """
    return Category.from_dict(category_dictionary)
#
def category_to_dict(category: Category):
    """
    Serialize a Category object into a plain dict.

    :param category: A Category object.
    :type category: ibmpairs.catalog.Category
    :rtype: dict
    """
    category_dict = Category.to_dict(category)
    return category_dict
#
def category_from_json(category_json: Any):
    """
    Build a Category object from a dictionary or a json string.

    :param category_json: A dictionary or json string that contains the keys of a Category.
    :type category_json: Any
    :rtype: ibmpairs.catalog.Category
    :raises Exception: If not a dict or a str.
    """
    return Category.from_json(category_json)
#
def category_to_json(category: Category):
    """
    Serialize a Category object into a json string.

    :param category: A Category object.
    :type category: ibmpairs.catalog.Category
    :rtype: str
    """
    category_json = Category.to_json(category)
    return category_json
#
def properties_from_dict(properties_dictionary: dict):
    """
    Build a Properties object from a dictionary.

    :param properties_dictionary: A dictionary that contains the keys of a Properties.
    :type properties_dictionary: dict
    :rtype: ibmpairs.catalog.Properties
    :raises Exception: If not a dict.
    """
    return Properties.from_dict(properties_dictionary)
#
def properties_to_dict(properties: Properties):
    """
    Serialize a Properties object into a plain dict.

    :param properties: A Properties object.
    :type properties: ibmpairs.catalog.Properties
    :rtype: dict
    """
    properties_dict = Properties.to_dict(properties)
    return properties_dict
#
def properties_from_json(properties_json: Any):
    """
    Build a Properties object from a dictionary or a json string.

    :param properties_json: A dictionary or json string that contains the keys of a Properties.
    :type properties_json: Any
    :rtype: ibmpairs.catalog.Properties
    :raises Exception: If not a dict or a str.
    """
    return Properties.from_json(properties_json)
#
def properties_to_json(properties: Properties):
    """
    Serialize a Properties object into a json string.

    :param properties: A Properties object.
    :type properties: ibmpairs.catalog.Properties
    :rtype: str
    """
    properties_json = Properties.to_json(properties)
    return properties_json
#
def spatial_coverage_from_dict(spatial_coverage_dictionary: dict):
    """
    Build a SpatialCoverage object from a dictionary.

    :param spatial_coverage_dictionary: A dictionary that contains the keys of a SpatialCoverage.
    :type spatial_coverage_dictionary: dict
    :rtype: ibmpairs.catalog.SpatialCoverage
    :raises Exception: If not a dict.
    """
    return SpatialCoverage.from_dict(spatial_coverage_dictionary)
#
def spatial_coverage_to_dict(spatial_coverage: SpatialCoverage):
    """
    Serialize a SpatialCoverage object into a plain dict.

    :param spatial_coverage: A SpatialCoverage object.
    :type spatial_coverage: ibmpairs.catalog.SpatialCoverage
    :rtype: dict
    """
    spatial_coverage_dict = SpatialCoverage.to_dict(spatial_coverage)
    return spatial_coverage_dict
#
def spatial_coverage_from_json(spatial_coverage_json: Any):
    """
    Build a SpatialCoverage object from a dictionary or a json string.

    :param spatial_coverage_json: A dictionary or json string that contains the keys of a SpatialCoverage.
    :type spatial_coverage_json: Any
    :rtype: ibmpairs.catalog.SpatialCoverage
    :raises Exception: If not a dict or a str.
    """
    return SpatialCoverage.from_json(spatial_coverage_json)
#
def spatial_coverage_to_json(spatial_coverage: SpatialCoverage):
    """
    Serialize a SpatialCoverage object into a json string.

    :param spatial_coverage: A SpatialCoverage object.
    :type spatial_coverage: ibmpairs.catalog.SpatialCoverage
    :rtype: str
    """
    spatial_coverage_json = SpatialCoverage.to_json(spatial_coverage)
    return spatial_coverage_json
#
def data_set_from_dict(data_set_dictionary: dict,
                       client: cl.Client = None):
    """
    Build a DataSet object from a dictionary and attach a client to it.

    :param data_set_dictionary: A dictionary that contains the keys of a DataSet.
    :type data_set_dictionary: dict
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataSet
    :raises Exception: If not a dict.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_set = DataSet.from_dict(data_set_dictionary)
    data_set.client = resolved_client
    return data_set
#
def data_set_to_dict(data_set: DataSet):
    """
    Serialize a DataSet object into a plain dict.

    :param data_set: A DataSet object.
    :type data_set: ibmpairs.catalog.DataSet
    :rtype: dict
    """
    data_set_dict = DataSet.to_dict(data_set)
    return data_set_dict
#
def data_set_to_dict_post(data_set: DataSet):
    """
    Serialize a DataSet object into a dict shaped for a POST call.

    :param data_set: A DataSet object.
    :type data_set: ibmpairs.catalog.DataSet
    :rtype: dict
    """
    post_dict = DataSet.to_dict_data_set_post(data_set)
    return post_dict
#
def data_set_to_dict_put(data_set: DataSet):
    """
    Serialize a DataSet object into a dict shaped for a PUT call.

    :param data_set: A DataSet object.
    :type data_set: ibmpairs.catalog.DataSet
    :rtype: dict
    """
    put_dict = DataSet.to_dict_data_set_put(data_set)
    return put_dict
#
def data_set_from_json(data_set_json: Any,
                       client: cl.Client = None):
    """
    Build a DataSet object from a dictionary or a json string and attach a client.

    :param data_set_json: A dictionary or json string that contains the keys of a DataSet.
    :type data_set_json: Any
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataSet
    :raises Exception: If not a dict or a str.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_set = DataSet.from_json(data_set_json)
    data_set.client = resolved_client
    return data_set
#
def data_set_to_json(data_set: DataSet):
    """
    Serialize a DataSet object into a json string.

    :param data_set: A DataSet object.
    :type data_set: ibmpairs.catalog.DataSet
    :rtype: str
    """
    data_set_json = DataSet.to_json(data_set)
    return data_set_json
#
def data_set_to_json_post(data_set: DataSet):
    """
    Serialize a DataSet object into a json string shaped for a POST call.

    :param data_set: A DataSet object.
    :type data_set: ibmpairs.catalog.DataSet
    :rtype: str
    """
    post_json = DataSet.to_json_data_set_post(data_set)
    return post_json
#
def data_set_to_json_put(data_set: DataSet):
    """
    Serialize a DataSet object into a json string shaped for a PUT call.

    :param data_set: A DataSet object.
    :type data_set: ibmpairs.catalog.DataSet
    :rtype: str
    """
    put_json = DataSet.to_json_data_set_put(data_set)
    return put_json
#
def data_sets_from_dict(data_sets_dictionary: dict,
                        client: cl.Client = None):
    """
    Build a DataSets object from a dictionary and attach a client to it.

    :param data_sets_dictionary: A dictionary that contains the keys of a DataSets.
    :type data_sets_dictionary: dict
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataSets
    :raises Exception: If not a dict.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_sets = DataSets.from_dict(data_sets_dictionary)
    data_sets.client = resolved_client
    return data_sets
#
def data_sets_to_dict(data_sets: DataSets):
    """
    Serialize a DataSets object into a plain dict.

    :param data_sets: A DataSets object.
    :type data_sets: ibmpairs.catalog.DataSets
    :rtype: dict
    """
    data_sets_dict = DataSets.to_dict(data_sets)
    return data_sets_dict
#
def data_sets_from_json(data_sets_json: Any,
                        client: cl.Client = None):
    """
    Build a DataSets object from a dictionary or a json string and attach a client.

    :param data_sets_json: A dictionary or json string that contains the keys of a DataSets.
    :type data_sets_json: Any
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataSets
    :raises Exception: If not a dict or a str.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_sets = DataSets.from_json(data_sets_json)
    data_sets.client = resolved_client
    return data_sets
#
def data_sets_to_json(data_sets: DataSets):
    """
    Serialize a DataSets object into a json string.

    :param data_sets: A DataSets object.
    :type data_sets: ibmpairs.catalog.DataSets
    :rtype: str
    """
    data_sets_json = DataSets.to_json(data_sets)
    return data_sets_json
#
def data_set_return_from_dict(data_set_return_dictionary: dict):
    """
    Build a DataSetReturn object from a dictionary.

    :param data_set_return_dictionary: A dictionary that contains the keys of a DataSetReturn.
    :type data_set_return_dictionary: dict
    :rtype: ibmpairs.catalog.DataSetReturn
    :raises Exception: If not a dict.
    """
    return DataSetReturn.from_dict(data_set_return_dictionary)
#
def data_set_return_to_dict(data_set_return: DataSetReturn):
    """
    Serialize a DataSetReturn object into a plain dict.

    :param data_set_return: A DataSetReturn object.
    :type data_set_return: ibmpairs.catalog.DataSetReturn
    :rtype: dict
    """
    data_set_return_dict = DataSetReturn.to_dict(data_set_return)
    return data_set_return_dict
#
def data_set_return_from_json(data_set_return_json: Any):
    """
    Build a DataSetReturn object from a dictionary or a json string.

    :param data_set_return_json: A dictionary or json string that contains the keys of a DataSetReturn.
    :type data_set_return_json: Any
    :rtype: ibmpairs.catalog.DataSetReturn
    :raises Exception: If not a dict or a str.
    """
    return DataSetReturn.from_json(data_set_return_json)
#
def data_set_return_to_json(data_set_return: DataSetReturn):
    """
    Serialize a DataSetReturn object into a json string.

    :param data_set_return: A DataSetReturn object.
    :type data_set_return: ibmpairs.catalog.DataSetReturn
    :rtype: str
    """
    data_set_return_json = DataSetReturn.to_json(data_set_return)
    return data_set_return_json
#
def color_table_from_dict(color_table_dictionary: dict):
    """
    Build a ColorTable object from a dictionary.

    :param color_table_dictionary: A dictionary that contains the keys of a ColorTable.
    :type color_table_dictionary: dict
    :rtype: ibmpairs.catalog.ColorTable
    :raises Exception: If not a dict.
    """
    return ColorTable.from_dict(color_table_dictionary)
#
def color_table_to_dict(color_table: ColorTable):
    """
    Serialize a ColorTable object into a plain dict.

    :param color_table: A ColorTable object.
    :type color_table: ibmpairs.catalog.ColorTable
    :rtype: dict
    """
    color_table_dict = ColorTable.to_dict(color_table)
    return color_table_dict
#
def color_table_from_json(color_table_json: Any):
    """
    Build a ColorTable object from a dictionary or a json string.

    :param color_table_json: A dictionary or json string that contains the keys of a ColorTable.
    :type color_table_json: Any
    :rtype: ibmpairs.catalog.ColorTable
    :raises Exception: If not a dict or a str.
    """
    return ColorTable.from_json(color_table_json)
#
def color_table_to_json(color_table: ColorTable):
    """
    Serialize a ColorTable object into a json string.

    :param color_table: A ColorTable object.
    :type color_table: ibmpairs.catalog.ColorTable
    :rtype: str
    """
    color_table_json = ColorTable.to_json(color_table)
    return color_table_json
#
def data_layer_return_from_dict(data_layer_return_dictionary: dict):
    """
    Build a DataLayerReturn object from a dictionary.

    :param data_layer_return_dictionary: A dictionary that contains the keys of a DataLayerReturn.
    :type data_layer_return_dictionary: dict
    :rtype: ibmpairs.catalog.DataLayerReturn
    :raises Exception: If not a dict.
    """
    return DataLayerReturn.from_dict(data_layer_return_dictionary)
#
def data_layer_return_to_dict(data_layer_return: DataLayerReturn):
    """
    Serialize a DataLayerReturn object into a plain dict.

    :param data_layer_return: A DataLayerReturn object.
    :type data_layer_return: ibmpairs.catalog.DataLayerReturn
    :rtype: dict
    """
    data_layer_return_dict = DataLayerReturn.to_dict(data_layer_return)
    return data_layer_return_dict
#
def data_layer_return_from_json(data_layer_return_json: Any):
    """
    Build a DataLayerReturn object from a dictionary or a json string.

    :param data_layer_return_json: A dictionary or json string that contains the keys of a DataLayerReturn.
    :type data_layer_return_json: Any
    :rtype: ibmpairs.catalog.DataLayerReturn
    :raises Exception: If not a dict or a str.
    """
    return DataLayerReturn.from_json(data_layer_return_json)
#
def data_layer_return_to_json(data_layer_return: DataLayerReturn):
    """
    Serialize a DataLayerReturn object into a json string.

    :param data_layer_return: A DataLayerReturn object.
    :type data_layer_return: ibmpairs.catalog.DataLayerReturn
    :rtype: str
    """
    data_layer_return_json = DataLayerReturn.to_json(data_layer_return)
    return data_layer_return_json
#
def data_layer_dimension_return_from_dict(data_layer_dimension_return_dictionary: dict):
    """
    Build a DataLayerDimensionReturn object from a dictionary.

    :param data_layer_dimension_return_dictionary: A dictionary that contains the keys of a DataLayerDimensionReturn.
    :type data_layer_dimension_return_dictionary: dict
    :rtype: ibmpairs.catalog.DataLayerDimensionReturn
    :raises Exception: If not a dict.
    """
    return DataLayerDimensionReturn.from_dict(data_layer_dimension_return_dictionary)
#
def data_layer_dimension_return_to_dict(data_layer_dimension_return: DataLayerDimensionReturn):
    """
    Serialize a DataLayerDimensionReturn object into a plain dict.

    :param data_layer_dimension_return: A DataLayerDimensionReturn object.
    :type data_layer_dimension_return: ibmpairs.catalog.DataLayerDimensionReturn
    :rtype: dict
    """
    result = DataLayerDimensionReturn.to_dict(data_layer_dimension_return)
    return result
#
def data_layer_dimension_return_from_json(data_layer_dimension_return_json: Any):
    """
    Build a DataLayerDimensionReturn object from a dictionary or a json string.

    :param data_layer_dimension_return_json: A dictionary or json string that contains the keys of a DataLayerDimensionReturn.
    :type data_layer_dimension_return_json: Any
    :rtype: ibmpairs.catalog.DataLayerDimensionReturn
    :raises Exception: If not a dict or a str.
    """
    return DataLayerDimensionReturn.from_json(data_layer_dimension_return_json)
#
def data_layer_dimension_return_to_json(data_layer_dimension_return: DataLayerDimensionReturn):
    """
    Serialize a DataLayerDimensionReturn object into a json string.

    :param data_layer_dimension_return: A DataLayerDimensionReturn object.
    :type data_layer_dimension_return: ibmpairs.catalog.DataLayerDimensionReturn
    :rtype: str
    """
    result = DataLayerDimensionReturn.to_json(data_layer_dimension_return)
    return result
#
def data_layer_dimension_from_dict(data_layer_dimension_dictionary: dict,
                                   client: cl.Client = None):
    """
    Build a DataLayerDimension object from a dictionary and attach a client.

    :param data_layer_dimension_dictionary: A dictionary that contains the keys of a DataLayerDimension.
    :type data_layer_dimension_dictionary: dict
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataLayerDimension
    :raises Exception: If not a dict.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer_dimension = DataLayerDimension.from_dict(data_layer_dimension_dictionary)
    data_layer_dimension.client = resolved_client
    return data_layer_dimension
#
def data_layer_dimension_to_dict(data_layer_dimension: DataLayerDimension):
    """
    Serialize a DataLayerDimension object into a plain dict.

    :param data_layer_dimension: A DataLayerDimension object.
    :type data_layer_dimension: ibmpairs.catalog.DataLayerDimension
    :rtype: dict
    """
    result = DataLayerDimension.to_dict(data_layer_dimension)
    return result
#
def data_layer_dimension_to_dict_post(data_layer_dimension: DataLayerDimension):
    """
    Serialize a DataLayerDimension object into a dict shaped for a POST call.

    :param data_layer_dimension: A DataLayerDimension object.
    :type data_layer_dimension: ibmpairs.catalog.DataLayerDimension
    :rtype: dict
    """
    post_dict = DataLayerDimension.to_dict_data_layer_dimension_post(data_layer_dimension)
    return post_dict
#
def data_layer_dimension_from_json(data_layer_dimension_json: Any,
                                   client: cl.Client = None):
    """
    Build a DataLayerDimension object from a dictionary or a json string and attach a client.

    :param data_layer_dimension_json: A dictionary or json string that contains the keys of a DataLayerDimension.
    :type data_layer_dimension_json: Any
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataLayerDimension
    :raises Exception: If not a dict or a str.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer_dimension = DataLayerDimension.from_json(data_layer_dimension_json)
    data_layer_dimension.client = resolved_client
    return data_layer_dimension
#
def data_layer_dimension_to_json(data_layer_dimension: DataLayerDimension):
    """
    Serialize a DataLayerDimension object into a json string.

    :param data_layer_dimension: A DataLayerDimension object.
    :type data_layer_dimension: ibmpairs.catalog.DataLayerDimension
    :rtype: str
    """
    result = DataLayerDimension.to_json(data_layer_dimension)
    return result
#
def data_layer_dimension_to_json_post(data_layer_dimension: DataLayerDimension):
    """
    Serialize a DataLayerDimension object into a json string shaped for a POST call.

    :param data_layer_dimension: A DataLayerDimension object.
    :type data_layer_dimension: ibmpairs.catalog.DataLayerDimension
    :rtype: str
    """
    post_json = DataLayerDimension.to_json_data_layer_dimension_post(data_layer_dimension)
    return post_json
#
def data_layer_dimensions_from_dict(data_layer_dimensions_dictionary: dict,
                                    client: cl.Client = None):
    """
    Build a DataLayerDimensions object from a dictionary and attach a client.

    :param data_layer_dimensions_dictionary: A dictionary that contains the keys of a DataLayerDimensions.
    :type data_layer_dimensions_dictionary: dict
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataLayerDimensions
    :raises Exception: If not a dict.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer_dimensions = DataLayerDimensions.from_dict(data_layer_dimensions_dictionary)
    data_layer_dimensions.client = resolved_client
    return data_layer_dimensions
#
def data_layer_dimensions_to_dict(data_layer_dimensions: DataLayerDimensions):
    """
    Serialize a DataLayerDimensions object into a plain dict.

    :param data_layer_dimensions: A DataLayerDimensions object.
    :type data_layer_dimensions: ibmpairs.catalog.DataLayerDimensions
    :rtype: dict
    """
    result = DataLayerDimensions.to_dict(data_layer_dimensions)
    return result
#
def data_layer_dimensions_from_json(data_layer_dimensions_json: Any,
                                    client: cl.Client = None):
    """
    Build a DataLayerDimensions object from a dictionary or a json string and attach a client.

    :param data_layer_dimensions_json: A dictionary or json string that contains the keys of a DataLayerDimensions.
    :type data_layer_dimensions_json: Any
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataLayerDimensions
    :raises Exception: If not a dict or a str.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer_dimensions = DataLayerDimensions.from_json(data_layer_dimensions_json)
    data_layer_dimensions.client = resolved_client
    return data_layer_dimensions
#
def data_layer_dimensions_to_json(data_layer_dimensions: DataLayerDimensions):
    """
    Serialize a DataLayerDimensions object into a json string.

    :param data_layer_dimensions: A DataLayerDimensions object.
    :type data_layer_dimensions: ibmpairs.catalog.DataLayerDimensions
    :rtype: str
    """
    result = DataLayerDimensions.to_json(data_layer_dimensions)
    return result
#
def data_layer_property_return_from_dict(data_layer_property_return_dictionary: dict):
    """
    Build a DataLayerPropertyReturn object from a dictionary.

    :param data_layer_property_return_dictionary: A dictionary that contains the keys of a DataLayerPropertyReturn.
    :type data_layer_property_return_dictionary: dict
    :rtype: ibmpairs.catalog.DataLayerPropertyReturn
    :raises Exception: If not a dict.
    """
    return DataLayerPropertyReturn.from_dict(data_layer_property_return_dictionary)
#
def data_layer_property_return_to_dict(data_layer_property_return: DataLayerPropertyReturn):
    """
    Serialize a DataLayerPropertyReturn object into a plain dict.

    :param data_layer_property_return: A DataLayerPropertyReturn object.
    :type data_layer_property_return: ibmpairs.catalog.DataLayerPropertyReturn
    :rtype: dict
    """
    result = DataLayerPropertyReturn.to_dict(data_layer_property_return)
    return result
#
def data_layer_property_return_from_json(data_layer_property_return_json: Any):
    """
    Build a DataLayerPropertyReturn object from a dictionary or a json string.

    :param data_layer_property_return_json: A dictionary or json string that contains the keys of a DataLayerPropertyReturn.
    :type data_layer_property_return_json: Any
    :rtype: ibmpairs.catalog.DataLayerPropertyReturn
    :raises Exception: If not a dict or a str.
    """
    return DataLayerPropertyReturn.from_json(data_layer_property_return_json)
#
def data_layer_property_return_to_json(data_layer_property_return: DataLayerPropertyReturn):
    """
    The method converts an object of DataLayerPropertyReturn to a json string.

    :param data_layer_property_return: A DataLayerPropertyReturn object.
    :type data_layer_property_return: ibmpairs.catalog.DataLayerPropertyReturn
    :rtype: str
    """
    # Consistency fix: delegate to the class's own to_json (as every other
    # *_to_json helper in this module does) rather than json.dumps(to_dict(...)),
    # which bypassed any custom serialization in DataLayerPropertyReturn.to_json.
    return DataLayerPropertyReturn.to_json(data_layer_property_return)
#
def data_layer_property_from_dict(data_layer_property_dictionary: dict,
                                  client: cl.Client = None):
    """
    Build a DataLayerProperty object from a dictionary and attach a client.

    :param data_layer_property_dictionary: A dictionary that contains the keys of a DataLayerProperty.
    :type data_layer_property_dictionary: dict
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataLayerProperty
    :raises Exception: If not a dict.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer_property = DataLayerProperty.from_dict(data_layer_property_dictionary)
    data_layer_property.client = resolved_client
    return data_layer_property
#
def data_layer_property_to_dict(data_layer_property: DataLayerProperty):
    """
    Serialize a DataLayerProperty object into a plain dict.

    :param data_layer_property: A DataLayerProperty object.
    :type data_layer_property: ibmpairs.catalog.DataLayerProperty
    :rtype: dict
    """
    result = DataLayerProperty.to_dict(data_layer_property)
    return result
#
def data_layer_property_to_dict_post(data_layer_property: DataLayerProperty):
    """
    Serialize a DataLayerProperty object into a dict shaped for a POST call.

    :param data_layer_property: A DataLayerProperty object.
    :type data_layer_property: ibmpairs.catalog.DataLayerProperty
    :rtype: dict
    """
    post_dict = DataLayerProperty.to_dict_data_layer_property_post(data_layer_property)
    return post_dict
#
def data_layer_property_from_json(data_layer_property_json: Any,
                                  client: cl.Client = None):
    """
    Build a DataLayerProperty object from a dictionary or a json string and attach a client.

    :param data_layer_property_json: A dictionary or json string that contains the keys of a DataLayerProperty.
    :type data_layer_property_json: Any
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataLayerProperty
    :raises Exception: If not a dict or a str.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer_property = DataLayerProperty.from_json(data_layer_property_json)
    data_layer_property.client = resolved_client
    return data_layer_property
#
def data_layer_property_to_json(data_layer_property: DataLayerProperty):
    """
    Serialize a DataLayerProperty object into a json string.

    :param data_layer_property: A DataLayerProperty object.
    :type data_layer_property: ibmpairs.catalog.DataLayerProperty
    :rtype: str
    """
    result = DataLayerProperty.to_json(data_layer_property)
    return result
#
def data_layer_property_to_json_post(data_layer_property: DataLayerProperty):
    """
    Serialize a DataLayerProperty object into a json string shaped for a POST call.

    :param data_layer_property: A DataLayerProperty object.
    :type data_layer_property: ibmpairs.catalog.DataLayerProperty
    :rtype: str
    """
    post_json = DataLayerProperty.to_json_data_layer_property_post(data_layer_property)
    return post_json
#
def data_layer_properties_from_dict(data_layer_properties_dictionary: dict,
                                    client: cl.Client = None):
    """
    Build a DataLayerProperties object from a dictionary and attach a client.

    :param data_layer_properties_dictionary: A dictionary that contains the keys of a DataLayerProperties.
    :type data_layer_properties_dictionary: dict
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataLayerProperties
    :raises Exception: If not a dict.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer_properties = DataLayerProperties.from_dict(data_layer_properties_dictionary)
    data_layer_properties.client = resolved_client
    return data_layer_properties
#
def data_layer_properties_to_dict(data_layer_properties: DataLayerProperties):
    """
    Serialize a DataLayerProperties object into a plain dict.

    :param data_layer_properties: A DataLayerProperties object.
    :type data_layer_properties: ibmpairs.catalog.DataLayerProperties
    :rtype: dict
    """
    result = DataLayerProperties.to_dict(data_layer_properties)
    return result
#
def data_layer_properties_from_json(data_layer_properties_json: Any,
                                    client: cl.Client = None):
    """
    Build a DataLayerProperties object from a dictionary or a json string and attach a client.

    :param data_layer_properties_json: A dictionary or json string that contains the keys of a DataLayerProperties.
    :type data_layer_properties_json: Any
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataLayerProperties
    :raises Exception: If not a dict or a str.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer_properties = DataLayerProperties.from_json(data_layer_properties_json)
    data_layer_properties.client = resolved_client
    return data_layer_properties
#
def data_layer_properties_to_json(data_layer_properties: DataLayerProperties):
    """
    Serialize a DataLayerProperties object into a json string.

    :param data_layer_properties: A DataLayerProperties object.
    :type data_layer_properties: ibmpairs.catalog.DataLayerProperties
    :rtype: str
    """
    result = DataLayerProperties.to_json(data_layer_properties)
    return result
#
def data_layer_from_dict(data_layer_dictionary: dict,
                         client: cl.Client = None):
    """
    Build a DataLayer object from a dictionary and attach a client.

    :param data_layer_dictionary: A dictionary that contains the keys of a DataLayer.
    :type data_layer_dictionary: dict
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataLayer
    :raises Exception: If not a dict.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer = DataLayer.from_dict(data_layer_dictionary)
    data_layer.client = resolved_client
    return data_layer
#
def data_layer_to_dict(data_layer: DataLayer):
    """
    Serialize a DataLayer object into a plain dict.

    :param data_layer: A DataLayer object.
    :type data_layer: ibmpairs.catalog.DataLayer
    :rtype: dict
    """
    data_layer_dict = DataLayer.to_dict(data_layer)
    return data_layer_dict
#
def data_layer_to_dict_post(data_layer: DataLayer):
    """
    Serialize a DataLayer object into a dict shaped for a POST call.

    :param data_layer: A DataLayer object.
    :type data_layer: ibmpairs.catalog.DataLayer
    :rtype: dict
    """
    post_dict = DataLayer.to_dict_data_layer_post(data_layer)
    return post_dict
#
def data_layer_to_dict_put(data_layer: DataLayer):
    """
    Serialize a DataLayer object into a dict shaped for a PUT call.

    :param data_layer: A DataLayer object.
    :type data_layer: ibmpairs.catalog.DataLayer
    :rtype: dict
    """
    put_dict = DataLayer.to_dict_data_layer_put(data_layer)
    return put_dict
#
def data_layer_from_json(data_layer_json: Any,
                         client: cl.Client = None):
    """
    Build a DataLayer object from a dictionary or a json string and attach a client.

    :param data_layer_json: A dictionary or json string that contains the keys of a DataLayer.
    :type data_layer_json: Any
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataLayer
    :raises Exception: If not a dict or a str.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer = DataLayer.from_json(data_layer_json)
    data_layer.client = resolved_client
    return data_layer
#
def data_layer_to_json(data_layer: DataLayer):
    """
    Serialize a DataLayer object into a json string.

    :param data_layer: A DataLayer object.
    :type data_layer: ibmpairs.catalog.DataLayer
    :rtype: str
    """
    data_layer_json = DataLayer.to_json(data_layer)
    return data_layer_json
#
def data_layer_to_json_post(data_layer: DataLayer):
    """
    Serialize a DataLayer object into a json string shaped for a POST call.

    :param data_layer: A DataLayer object.
    :type data_layer: ibmpairs.catalog.DataLayer
    :rtype: str
    """
    post_json = DataLayer.to_json_data_layer_post(data_layer)
    return post_json
#
def data_layer_to_json_put(data_layer: DataLayer):
    """
    Serialize a DataLayer object into a json string shaped for a PUT call.

    :param data_layer: A DataLayer object.
    :type data_layer: ibmpairs.catalog.DataLayer
    :rtype: str
    """
    put_json = DataLayer.to_json_data_layer_put(data_layer)
    return put_json
#
def data_layers_from_dict(data_layers_dictionary: dict,
                          client: cl.Client = None):
    """
    Build a DataLayers object from a dictionary and attach a client.

    :param data_layers_dictionary: A dictionary that contains the keys of a DataLayers.
    :type data_layers_dictionary: dict
    :param client: An IBM PAIRS client.
    :type client: ibmpairs.client.Client
    :rtype: ibmpairs.catalog.DataLayers
    :raises Exception: If not a dict.
    """
    # Resolve the client first (falls back to the global client when None).
    resolved_client = common.set_client(input_client = client,
                                        global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layers = DataLayers.from_dict(data_layers_dictionary)
    data_layers.client = resolved_client
    return data_layers
#
def data_layers_to_dict(data_layers: DataLayers):
    """
    Serialize a DataLayers object into a plain dict.

    :param data_layers: A DataLayers object.
    :type data_layers: ibmpairs.catalog.DataLayers
    :rtype: dict
    """
    data_layers_dict = DataLayers.to_dict(data_layers)
    return data_layers_dict
#
def data_layers_to_dict_post(data_layers: DataLayers):
    """
    Serialize a DataLayers object into a dict shaped for a POST call.

    :param data_layers: A DataLayers object.
    :type data_layers: ibmpairs.catalog.DataLayers
    :rtype: dict
    """
    post_dict = DataLayers.to_dict_data_layers_post(data_layers)
    return post_dict
#
def data_layers_from_json(data_layers_json: Any,
                          client: cl.Client = None):
    """
    Build a DataLayers object from a dictionary or json string.

    :param data_layers_json: A dictionary or json string that contains the keys of a DataLayers.
    :type data_layers_json:  Any
    :param client:           An IBM PAIRS client.
    :type client:            ibmpairs.client.Client
    :rtype:                  ibmpairs.catalog.DataLayers
    :raises Exception:       If the input is not a dict or a str.
    """
    result = DataLayers.from_json(data_layers_json)
    # Attach a client: the explicit argument wins, otherwise the global client.
    result.client = common.set_client(input_client  = client,
                                      global_client = cl.GLOBAL_PAIRS_CLIENT)
    return result
#
def data_layers_to_json(data_layers: DataLayers):
    """
    Serialize a DataLayers object to a json string.

    :param data_layers: A DataLayers object.
    :type data_layers:  ibmpairs.catalog.DataLayers
    :rtype:             str
    """
    json_string = DataLayers.to_json(data_layers)
    return json_string
#
def data_layers_to_json_post(data_layers: DataLayers):
    """
    Serialize a DataLayers object to a json string shaped for a POST call.

    :param data_layers: A DataLayers object.
    :type data_layers:  ibmpairs.catalog.DataLayers
    :rtype:             str
    """
    json_string = DataLayers.to_json_data_layers_post(data_layers)
    return json_string
# fold: Catalog Methods {{{
#
def get_data_sets(client: cl.Client = None,
                  verify: bool = constants.GLOBAL_SSL_VERIFY
                 ):
    """
    Fetch metadata about all DataSets from the server.

    :param client: An IBM PAIRS client.
    :type client:  ibmpairs.client.Client
    :param verify: SSL Verification flag.
    :type verify:  bool
    :rtype:        ibmpairs.catalog.DataSets
    :raises Exception: If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    result = DataSets()
    result.get(client = pairs_client,
               verify = verify)
    return result
#
def get_data_set(id,
                 client: cl.Client = None,
                 verify: bool = constants.GLOBAL_SSL_VERIFY
                ):
    """
    Fetch metadata about a single DataSet from the server.

    :param id:     A DataSet ID number.
    :type id:      int or str
    :param client: An IBM PAIRS client.
    :type client:  ibmpairs.client.Client
    :param verify: SSL Verification flag.
    :type verify:  bool
    :rtype:        ibmpairs.catalog.DataSet
    :raises Exception: If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    # The id is coerced to str before the call.
    result = DataSet().get(id     = common.check_str(id),
                           client = pairs_client,
                           verify = verify)
    return result
#
def create_data_set(data_set: DataSet,
                    client: cl.Client = None,
                    verify: bool = constants.GLOBAL_SSL_VERIFY
                   ):
    """
    Create a DataSet on the server from a DataSet object.

    :param data_set: A DataSet object.
    :type data_set:  ibmpairs.catalog.DataSet
    :param client:   An IBM PAIRS client.
    :type client:    ibmpairs.client.Client
    :param verify:   SSL Verification flag.
    :type verify:    bool
    :rtype:          ibmpairs.catalog.DataSet
    :raises Exception: If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_set.create(client = pairs_client,
                    verify = verify)
    return data_set
#
def update_data_set(data_set: DataSet,
                    id = None,
                    client: cl.Client = None,
                    verify: bool = constants.GLOBAL_SSL_VERIFY
                   ):
    """
    Update a DataSet on the server from a DataSet object.

    :param data_set: A DataSet object.
    :type data_set:  ibmpairs.catalog.DataSet
    :param id:       A DataSet ID number (optional).
    :type id:        int or str
    :param client:   An IBM PAIRS client.
    :type client:    ibmpairs.client.Client
    :param verify:   SSL Verification flag.
    :type verify:    bool
    :rtype:          ibmpairs.catalog.DataSet
    :raises Exception: If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_set.update(id     = id,
                    client = pairs_client,
                    verify = verify)
    return data_set
#
def delete_data_set(id,
                    hard_delete: bool = False,
                    client: cl.Client = None,
                    verify: bool = constants.GLOBAL_SSL_VERIFY
                   ):
    """
    Delete a DataSet on the server.

    :param id:          A DataSet ID number.
    :type id:           int or str
    :param hard_delete: A flag to indicate whether a hard delete should be performed.
                        This is necessary where the intention is to re-create a DataSet
                        with the same name. WARNING: when a hard delete is performed
                        any data associated with the DataSet is deleted too.
    :type hard_delete:  bool
    :param client:      An IBM PAIRS client.
    :type client:       ibmpairs.client.Client
    :param verify:      SSL Verification flag.
    :type verify:       bool
    :rtype:             ibmpairs.catalog.DataSet
    :raises Exception:  If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    target = DataSet()
    # The id is coerced to str before the call.
    target.delete(id          = common.check_str(id),
                  hard_delete = hard_delete,
                  client      = pairs_client,
                  verify      = verify)
    return target
#
def get_data_layers(data_set_id = None,
                    data_layer_group_id: str = None,
                    data_layer_group: str = None,
                    client: cl.Client = None,
                    verify: bool = constants.GLOBAL_SSL_VERIFY
                   ):
    """
    Fetch metadata about all DataLayers, optionally filtered by DataSet or group.

    :param data_set_id:         A DataSet ID number (to get only DataLayers that belong to that DataSet).
    :type data_set_id:          int or str
    :param data_layer_group_id: The Data Layer Group ID to filter the results on.
    :type data_layer_group_id:  str
    :param data_layer_group:    The Data Layer Group name to filter the results on.
    :type data_layer_group:     str
    :param client:              An IBM PAIRS client.
    :type client:               ibmpairs.client.Client
    :param verify:              SSL Verification flag.
    :type verify:               bool
    :rtype:                     ibmpairs.catalog.DataLayers
    :raises Exception:          If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    result = DataLayers()
    result.get(data_set_id         = data_set_id,
               data_layer_group_id = data_layer_group_id,
               data_layer_group    = data_layer_group,
               client              = pairs_client,
               verify              = verify)
    return result
#
def create_data_layers(data_layers: DataLayers,
                       data_set_id = None,
                       data_layer_type: str = None,
                       data_layer_group: str = None,
                       client: cl.Client = None,
                       verify: bool = constants.GLOBAL_SSL_VERIFY
                      ):
    """
    Create a list of DataLayers on the server from a DataLayers object.

    :param data_layers:      A DataLayers object.
    :type data_layers:       ibmpairs.catalog.DataLayers
    :param data_set_id:      A DataSet ID number.
    :type data_set_id:       int or str
    :param data_layer_type:  A DataLayer type (i.e. Raster or VectorPoint or VectorPolygon).
    :type data_layer_type:   str
    :param data_layer_group: A DataLayer group name (if vector).
    :type data_layer_group:  str
    :param client:           An IBM PAIRS client.
    :type client:            ibmpairs.client.Client
    :param verify:           SSL Verification flag.
    :type verify:            bool
    :rtype:                  ibmpairs.catalog.DataLayers
    :raises Exception:       If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layers.create(data_set_id      = data_set_id,
                       data_layer_type  = data_layer_type,
                       data_layer_group = data_layer_group,
                       client           = pairs_client,
                       verify           = verify)
    return data_layers
#
def get_data_layer(id,
                   client:cl.Client = None,
                   verify: bool = constants.GLOBAL_SSL_VERIFY
                  ):
    """
    The method gets metadata about a DataLayer from the server.

    :param id:     A DataLayer ID number.
    :type id:      int or str
    :param client: An IBM PAIRS client.
    :type client:  ibmpairs.client.Client
    :param verify: SSL Verification flag.
    :type verify:  bool
    :rtype:        ibmpairs.catalog.DataLayer
    :raises Exception: If a global client is not set and no client is provided.
    """
    cli = common.set_client(input_client = client,
                            global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer = DataLayer()
    # Coerce the id to str for consistency with get_data_set/delete_data_layer,
    # which both pass ids through common.check_str.
    dl = data_layer.get(id = common.check_str(id),
                        client = cli,
                        verify = verify
                       )
    return dl
#
def create_data_layer(data_layer: DataLayer,
                      data_set_id,
                      data_layer_type: str,
                      data_layer_group: str = None,
                      client: cl.Client = None,
                      verify: bool = constants.GLOBAL_SSL_VERIFY
                     ):
    """
    Create a DataLayer within a DataSet from a DataLayer object.

    :param data_layer:       A DataLayer object.
    :type data_layer:        ibmpairs.catalog.DataLayer
    :param data_set_id:      A DataSet ID number.
    :type data_set_id:       int or str
    :param data_layer_type:  A DataLayer type (i.e. Raster or VectorPoint or VectorPolygon).
    :type data_layer_type:   str
    :param data_layer_group: A DataLayer group name (if vector).
    :type data_layer_group:  str
    :param client:           An IBM PAIRS client.
    :type client:            ibmpairs.client.Client
    :param verify:           SSL Verification flag.
    :type verify:            bool
    :rtype:                  ibmpairs.catalog.DataLayer
    :raises Exception:       If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    created = data_layer.create(data_set_id      = data_set_id,
                                data_layer_type  = data_layer_type,
                                data_layer_group = data_layer_group,
                                client           = pairs_client,
                                verify           = verify)
    return created
#
def update_data_layer(data_layer: DataLayer,
                      id = None,
                      client: cl.Client = None,
                      verify: bool = constants.GLOBAL_SSL_VERIFY
                     ):
    """
    Update a DataLayer on the server from a DataLayer object.

    :param data_layer: A DataLayer object.
    :type data_layer:  ibmpairs.catalog.DataLayer
    :param id:         A DataLayer ID number (optional).
    :type id:          int or str
    :param client:     An IBM PAIRS client.
    :type client:      ibmpairs.client.Client
    :param verify:     SSL Verification flag.
    :type verify:      bool
    :rtype:            ibmpairs.catalog.DataLayer
    :raises Exception: If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer.update(id     = id,
                      client = pairs_client,
                      verify = verify)
    return data_layer
#
def delete_data_layer(id,
                      hard_delete: bool = False,
                      client: cl.Client = None,
                      verify: bool = constants.GLOBAL_SSL_VERIFY
                     ):
    """
    Delete a DataLayer on the server.

    :param id:          A DataLayer ID number.
    :type id:           int or str
    :param hard_delete: A flag to indicate whether a hard delete should be performed.
                        This is necessary where the intention is to re-create a DataLayer
                        in a DataSet with the same name. WARNING: when a hard delete is
                        performed any data associated with the DataLayer is deleted too.
    :type hard_delete:  bool
    :param client:      An IBM PAIRS client.
    :type client:       ibmpairs.client.Client
    :param verify:      SSL Verification flag.
    :type verify:       bool
    :rtype:             ibmpairs.catalog.DataLayer
    :raises Exception:  If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    target = DataLayer()
    # The id is coerced to str before the call.
    target.delete(id          = common.check_str(id),
                  hard_delete = hard_delete,
                  client      = pairs_client,
                  verify      = verify)
    return target
#
def get_data_layer_dimensions(data_layer_id = None,
                              client: cl.Client = None,
                              verify: bool = constants.GLOBAL_SSL_VERIFY
                             ):
    """
    Fetch metadata about all DataLayerDimensions of a DataLayer from the server.

    :param data_layer_id: A DataLayer ID number.
    :type data_layer_id:  int or str
    :param client:        An IBM PAIRS client.
    :type client:         ibmpairs.client.Client
    :param verify:        SSL Verification flag.
    :type verify:         bool
    :rtype:               ibmpairs.catalog.DataLayerDimensions
    :raises Exception:    If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    result = DataLayerDimensions()
    result.get(data_layer_id = data_layer_id,
               client        = pairs_client,
               verify        = verify)
    return result
#
def get_data_layer_dimension(id,
                             client:cl.Client = None,
                             verify: bool = constants.GLOBAL_SSL_VERIFY
                            ):
    """
    Fetch metadata about a single DataLayerDimension from the server.

    :param id:     A DataLayerDimension ID number.
    :type id:      int or str
    :param client: An IBM PAIRS client.
    :type client:  ibmpairs.client.Client
    :param verify: SSL Verification flag.
    :type verify:  bool
    :rtype:        ibmpairs.catalog.DataLayerDimension
    :raises Exception: If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    result = DataLayerDimension().get(id     = id,
                                      client = pairs_client,
                                      verify = verify)
    return result
#
def create_data_layer_dimension(data_layer_dimension: DataLayerDimension,
                                data_layer_id,
                                client: cl.Client = None,
                                verify: bool = constants.GLOBAL_SSL_VERIFY
                               ):
    """
    Create a DataLayerDimension in a DataLayer from a DataLayerDimension object.

    :param data_layer_dimension: A DataLayerDimension object.
    :type data_layer_dimension:  ibmpairs.catalog.DataLayerDimension
    :param data_layer_id:        A DataLayer ID number.
    :type data_layer_id:         int or str
    :param client:               An IBM PAIRS client.
    :type client:                ibmpairs.client.Client
    :param verify:               SSL Verification flag.
    :type verify:                bool
    :rtype:                      ibmpairs.catalog.DataLayerDimension
    :raises Exception:           If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer_dimension.create(data_layer_id = data_layer_id,
                                client        = pairs_client,
                                verify        = verify)
    return data_layer_dimension
#
def get_data_layer_properties(data_layer_id = None,
                              client: cl.Client = None,
                              verify: bool = constants.GLOBAL_SSL_VERIFY
                             ):
    """
    Fetch metadata about all DataLayerProperties of a DataLayer from the server.

    :param data_layer_id: A DataLayer ID number.
    :type data_layer_id:  int or str
    :param client:        An IBM PAIRS client.
    :type client:         ibmpairs.client.Client
    :param verify:        SSL Verification flag.
    :type verify:         bool
    :rtype:               ibmpairs.catalog.DataLayerProperties
    :raises Exception:    If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    result = DataLayerProperties()
    result.get(data_layer_id = data_layer_id,
               client        = pairs_client,
               verify        = verify)
    return result
#
def get_data_layer_property(id,
                            client:cl.Client = None,
                            verify: bool = constants.GLOBAL_SSL_VERIFY
                           ):
    """
    Fetch metadata about a single DataLayerProperty from the server.

    :param id:     A DataLayerProperty ID number.
    :type id:      int or str
    :param client: An IBM PAIRS client.
    :type client:  ibmpairs.client.Client
    :param verify: SSL Verification flag.
    :type verify:  bool
    :rtype:        ibmpairs.catalog.DataLayerProperty
    :raises Exception: If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    result = DataLayerProperty().get(id     = id,
                                     client = pairs_client,
                                     verify = verify)
    return result
#
def create_data_layer_property(data_layer_property: DataLayerProperty,
                               data_layer_id,
                               client: cl.Client = None,
                               verify: bool = constants.GLOBAL_SSL_VERIFY
                              ):
    """
    Create a DataLayerProperty in a DataLayer from a DataLayerProperty object.

    :param data_layer_property: A DataLayerProperty object.
    :type data_layer_property:  ibmpairs.catalog.DataLayerProperty
    :param data_layer_id:       A DataLayer ID number.
    :type data_layer_id:        int or str
    :param client:              An IBM PAIRS client.
    :type client:               ibmpairs.client.Client
    :param verify:              SSL Verification flag.
    :type verify:               bool
    :rtype:                     ibmpairs.catalog.DataLayerProperty
    :raises Exception:          If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    data_layer_property.create(data_layer_id = data_layer_id,
                               client        = pairs_client,
                               verify        = verify)
    return data_layer_property
#
def search(search_term: str,
           client: cl.Client = None,
           verify: bool = constants.GLOBAL_SSL_VERIFY
          ):
    """
    Perform a free text search over DataSets and DataLayers.

    :param search_term: A free text search term used to search DataSets and DataLayers.
    :type search_term:  str
    :param client:      An IBM PAIRS client.
    :type client:       ibmpairs.client.Client
    :param verify:      SSL Verification flag.
    :type verify:       bool
    :rtype:             pandas.DataFrame
    :raises Exception:  If a global client is not set and no client is provided.
    """
    pairs_client = common.set_client(input_client  = client,
                                     global_client = cl.GLOBAL_PAIRS_CLIENT)
    results = Search().all(search_term = search_term,
                           client      = pairs_client,
                           verify      = verify)
    return results
| 39.817725
| 265
| 0.589093
| 45,627
| 412,432
| 4.98422
| 0.0112
| 0.068663
| 0.021489
| 0.020179
| 0.908959
| 0.86376
| 0.832878
| 0.802414
| 0.774109
| 0.738702
| 0
| 0.000704
| 0.346035
| 412,432
| 10,358
| 266
| 39.817725
| 0.84246
| 0.243839
| 0
| 0.619433
| 0
| 0.002313
| 0.047312
| 0.015242
| 0
| 0
| 0
| 0
| 0
| 1
| 0.147677
| false
| 0
| 0.002313
| 0.032581
| 0.264315
| 0.000193
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bf580f12fe3ae3241ea54cc7e96d82d203910dbc
| 2,738
|
py
|
Python
|
tests/playground.py
|
andre4k14/sudoku_solver
|
e7e308c23d7dd1e9a9349970d601a60554396d80
|
[
"MIT"
] | null | null | null |
tests/playground.py
|
andre4k14/sudoku_solver
|
e7e308c23d7dd1e9a9349970d601a60554396d80
|
[
"MIT"
] | null | null | null |
tests/playground.py
|
andre4k14/sudoku_solver
|
e7e308c23d7dd1e9a9349970d601a60554396d80
|
[
"MIT"
] | null | null | null |
import sys
import signal
from sudoku_solver.sudokusolver import SudokuArray, SudokuSolver
from sudoku_solver import solve_sudoku
def cleanup(*args):
    """Signal handler: announce shutdown and exit with status 0."""
    print("The program is stopping")
    sys.exit(0)
def main():
    """
    Playground driver: defines five sudoku grids (0 = empty cell) and
    exercises the solver API on them.
    """
    test_1 = [[7, 9, 0, 2, 0, 0, 0, 6, 0],
              [2, 0, 0, 4, 0, 3, 0, 0, 1],
              [0, 0, 0, 0, 0, 0, 0, 0, 0],
              [0, 7, 0, 0, 0, 0, 4, 0, 9],
              [0, 0, 2, 8, 0, 4, 7, 0, 0],
              [8, 0, 1, 0, 0, 0, 0, 5, 0],
              [0, 0, 0, 0, 0, 0, 0, 0, 0],
              [5, 0, 0, 6, 0, 8, 0, 0, 3],
              [0, 2, 0, 0, 0, 5, 0, 4, 8]]
    test_2 = [[5, 0, 0, 4, 0, 0, 0, 0, 9],
              [9, 0, 0, 0, 0, 0, 4, 0, 7],
              [0, 0, 0, 0, 0, 0, 0, 5, 0],
              [6, 0, 2, 0, 4, 9, 0, 0, 0],
              [0, 7, 0, 0, 6, 3, 0, 0, 0],
              [0, 3, 0, 7, 0, 0, 6, 0, 8],
              [2, 0, 0, 1, 3, 6, 0, 7, 5],
              [0, 0, 5, 9, 0, 8, 2, 0, 0],
              [0, 8, 3, 0, 0, 4, 9, 1, 0]]
    test_3 = [[1, 0, 5, 0, 0, 0, 4, 9, 2],
              [0, 0, 2, 1, 4, 5, 0, 8, 3],
              [0, 3, 6, 2, 0, 9, 0, 0, 5],
              [8, 0, 0, 0, 6, 0, 2, 0, 9],
              [7, 0, 0, 8, 1, 0, 3, 4, 6],
              [0, 6, 3, 0, 9, 4, 0, 0, 1],
              [0, 1, 4, 0, 0, 8, 5, 0, 7],
              [0, 9, 0, 0, 3, 0, 0, 0, 0],
              [3, 0, 0, 0, 5, 0, 9, 6, 8]]
    test_4 = [[0, 0, 0, 0, 0, 0, 0, 0, 0], # empty
              [0, 0, 0, 0, 0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0, 0, 0, 0, 0]]
    test_5 = [[1, 0, 0, 0, 0, 0, 0, 0, 2], # unsolvable
              [0, 0, 0, 0, 0, 0, 0, 0, 0],
              [0, 0, 4, 0, 0, 0, 3, 0, 0],
              [0, 0, 0, 1, 0, 2, 0, 0, 0],
              [0, 1, 0, 0, 0, 0, 0, 0, 0],
              [1, 0, 0, 3, 0, 4, 0, 0, 0],
              [0, 0, 2, 0, 0, 0, 1, 0, 0],
              [0, 0, 0, 0, 0, 0, 0, 0, 0],
              [3, 0, 0, 0, 0, 0, 0, 0, 4]]
    # test_1 goes through the convenience entry point.
    solve_sudoku(test_1, 2)
    # NOTE(review): the next four assignments overwrite each other, so only
    # test_5 (the unsolvable grid) is actually passed to SudokuSolver below.
    sudoku = SudokuArray(test_2)
    sudoku = SudokuArray(test_3)
    sudoku = SudokuArray(test_4)
    sudoku = SudokuArray(test_5)
    sudoku_s = SudokuSolver(sudoku)
    sudoku_s.solve(10)
    sudoku_solved = sudoku_s.solved_sudoku_array
    print(sudoku_solved.create_representation_sudoku())
    sudoku_solved.print_sudoku()
    print(sudoku_solved.create_representation_sudoku())
if __name__ == '__main__':
    # Route Ctrl+C (SIGINT) through cleanup() for a clean exit message.
    signal.signal(signal.SIGINT, cleanup)
    try:
        main()
    except KeyboardInterrupt:
        # Fallback in case the interrupt arrives as a KeyboardInterrupt.
        cleanup()
| 32.987952
| 64
| 0.369978
| 512
| 2,738
| 1.908203
| 0.085938
| 0.42784
| 0.497441
| 0.548618
| 0.400205
| 0.35824
| 0.24565
| 0.191402
| 0.170931
| 0.135107
| 0
| 0.263854
| 0.420015
| 2,738
| 82
| 65
| 33.390244
| 0.351385
| 0.005844
| 0
| 0.185714
| 0
| 0
| 0.011401
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0
| 0.057143
| 0
| 0.085714
| 0.057143
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bf6c7790b9aca705d7735a54dd7153b6634df954
| 24
|
py
|
Python
|
fetch/__init__.py
|
wilsonj806/nyc-tree-data-fetcher
|
12ddfb8da11cf3b4f272cab167b7220d8744abca
|
[
"MIT"
] | null | null | null |
fetch/__init__.py
|
wilsonj806/nyc-tree-data-fetcher
|
12ddfb8da11cf3b4f272cab167b7220d8744abca
|
[
"MIT"
] | null | null | null |
fetch/__init__.py
|
wilsonj806/nyc-tree-data-fetcher
|
12ddfb8da11cf3b4f272cab167b7220d8744abca
|
[
"MIT"
] | null | null | null |
from .fetch import Fetch
| 24
| 24
| 0.833333
| 4
| 24
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 24
| 1
| 24
| 24
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bf919a1283f55af1cdc27d28aae93405cee0c4c1
| 97
|
py
|
Python
|
batchspawner/tests/conftest.py
|
dylex/jupyterhub-batchspawner
|
ce6f09c60c5e6814f44249b71e77f04e802747b9
|
[
"BSD-3-Clause"
] | 123
|
2016-07-23T07:04:43.000Z
|
2022-03-29T11:43:41.000Z
|
batchspawner/tests/conftest.py
|
NCAR/batchspawner
|
a31e9e2cd6803c3b940745cc7495c01a3c23badd
|
[
"BSD-3-Clause"
] | 186
|
2016-07-21T14:54:45.000Z
|
2022-03-31T14:18:58.000Z
|
batchspawner/tests/conftest.py
|
NCAR/batchspawner
|
a31e9e2cd6803c3b940745cc7495c01a3c23badd
|
[
"BSD-3-Clause"
] | 107
|
2016-07-27T22:08:50.000Z
|
2022-03-17T08:15:26.000Z
|
"""py.test fixtures imported from Jupyterhub testing"""
from jupyterhub.tests.conftest import *
| 24.25
| 55
| 0.783505
| 12
| 97
| 6.333333
| 0.833333
| 0.368421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113402
| 97
| 3
| 56
| 32.333333
| 0.883721
| 0.505155
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
44c92c07f253a29e8776a15c92cb5cc5433fe631
| 101
|
py
|
Python
|
src/utils.py
|
mayankjobanputra/UQA-fever
|
c8f8c8cf7f7659ca88a3de1969538740c28803ff
|
[
"MIT"
] | null | null | null |
src/utils.py
|
mayankjobanputra/UQA-fever
|
c8f8c8cf7f7659ca88a3de1969538740c28803ff
|
[
"MIT"
] | null | null | null |
src/utils.py
|
mayankjobanputra/UQA-fever
|
c8f8c8cf7f7659ca88a3de1969538740c28803ff
|
[
"MIT"
] | 1
|
2021-11-06T14:29:53.000Z
|
2021-11-06T14:29:53.000Z
|
import progressbar
def get_bar(max_value):
return progressbar.ProgressBar(max_value=max_value)
| 16.833333
| 55
| 0.811881
| 14
| 101
| 5.571429
| 0.571429
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118812
| 101
| 5
| 56
| 20.2
| 0.876404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 6
|
44ee7d1f9319f36173ae85161dd7c0b4490448f5
| 193
|
py
|
Python
|
core/views.py
|
Stanislav-Rybonka/studentsdb
|
efb1440db4ec640868342a5f74cd48784268781f
|
[
"MIT"
] | 1
|
2020-03-02T20:55:04.000Z
|
2020-03-02T20:55:04.000Z
|
core/views.py
|
Stanislav-Rybonka/studentsdb
|
efb1440db4ec640868342a5f74cd48784268781f
|
[
"MIT"
] | 6
|
2020-06-05T17:18:41.000Z
|
2022-03-11T23:14:47.000Z
|
core/views.py
|
Stanislav-Rybonka/studentsdb
|
efb1440db4ec640868342a5f74cd48784268781f
|
[
"MIT"
] | null | null | null |
from django.views.generic import TemplateView
class HomePageView(TemplateView):
template_name = 'site_/home.html'
class TeamPageView(TemplateView):
template_name = 'site_/team.html'
| 21.444444
| 45
| 0.777202
| 22
| 193
| 6.636364
| 0.681818
| 0.273973
| 0.328767
| 0.383562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129534
| 193
| 9
| 46
| 21.444444
| 0.869048
| 0
| 0
| 0
| 0
| 0
| 0.154639
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
780ef2cfabdd00393469fc2a6161372843c9bd37
| 1,767
|
py
|
Python
|
scripts/statistics/pedestrians_by_conditions.py
|
lopiola/integracja_wypadki
|
270c8784041c9b857c32f06099434d3ecb57319f
|
[
"MIT"
] | null | null | null |
scripts/statistics/pedestrians_by_conditions.py
|
lopiola/integracja_wypadki
|
270c8784041c9b857c32f06099434d3ecb57319f
|
[
"MIT"
] | null | null | null |
scripts/statistics/pedestrians_by_conditions.py
|
lopiola/integracja_wypadki
|
270c8784041c9b857c32f06099434d3ecb57319f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from scripts.db_api import accident
def general_query():
    """Return SQL counting all USA accidents and, alongside, those involving a pedestrian."""
    sql = '''
    SELECT count(*), (select count(*) from accident
    join person on(acc_id = accident.id)
    where country = 'USA'
    and person.type = 'PEDESTRIAN') as pedestrian
    from accident
    where country = 'USA';
    '''
    return sql
def rain_query():
    """Return SQL counting USA accidents in rain and, alongside, those involving a pedestrian."""
    sql = '''
    SELECT count(*), (select count(*) from accident
    join person on(acc_id = accident.id)
    where country = 'USA'
    and rain='YES'
    and person.type = 'PEDESTRIAN') as pedestrian
    from accident
    where country = 'USA'
    and rain='YES';
    '''
    return sql
def snow_query():
    """Return SQL counting USA accidents in snow and, alongside, those involving a pedestrian."""
    sql = '''
    SELECT count(*), (select count(*) from accident
    join person on(acc_id = accident.id)
    where country = 'USA'
    and snow='YES'
    and person.type = 'PEDESTRIAN') as pedestrian
    from accident
    where country = 'USA'
    and snow='YES';
    '''
    return sql
def fog_query():
    """Return SQL counting USA accidents in fog and, alongside, those involving a pedestrian."""
    sql = '''
    SELECT count(*), (select count(*) from accident
    join person on(acc_id = accident.id)
    where country = 'USA'
    and fog='YES'
    and person.type = 'PEDESTRIAN') as pedestrian
    from accident
    where country = 'USA'
    and fog='YES';
    '''
    return sql
def dark_query():
    """Return SQL counting USA accidents in darkness and, alongside, those involving a pedestrian."""
    sql = '''
    SELECT count(*), (select count(*) from accident
    join person on(acc_id = accident.id)
    where country = 'USA'
    and (lighting='DARK' or lighting='DARK_LIGHTED')
    and person.type = 'PEDESTRIAN') as pedestrian
    from accident
    where country = 'USA'
    and (lighting='DARK' or lighting='DARK_LIGHTED');
    '''
    return sql
def get_value(age, dictionary):
    """
    Look up a count by key, defaulting to 0 for missing keys.

    :param age: the key (an age bucket) to look up.
    :param dictionary: mapping of key -> count.
    :return: dictionary[age] if present, else 0.
    """
    # dict.get with a default replaces the explicit membership test + lookup.
    return dictionary.get(age, 0)
if __name__ == '__main__':
    # NOTE(review): only the dark-conditions query is executed here; the other
    # condition queries defined above are currently unused.
    print('ALL\tPEDESTRIAN')
    usa_count = accident.execute_query(dark_query())
    # First result row holds (total accident count, pedestrian-involved count).
    print('{0}\t{1}\t'.format(usa_count[0][0], usa_count[0][1]))
| 23.56
| 64
| 0.657612
| 241
| 1,767
| 4.709544
| 0.228216
| 0.096916
| 0.132159
| 0.142731
| 0.770925
| 0.770925
| 0.743612
| 0.743612
| 0.743612
| 0.743612
| 0
| 0.00563
| 0.195812
| 1,767
| 75
| 64
| 23.56
| 0.793103
| 0.021505
| 0
| 0.629032
| 0
| 0
| 0.72338
| 0.028356
| 0
| 0
| 0
| 0
| 0
| 1
| 0.096774
| false
| 0
| 0.016129
| 0.080645
| 0.225806
| 0.032258
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
78426b582adc55fae8a0b9b287317e16bc9cfd11
| 4,326
|
py
|
Python
|
tests/test_pydeduplines.py
|
Intsights/PyDeduplines
|
f8e38b2ce135469a670d8600e75d3f61447807f3
|
[
"MIT"
] | 31
|
2020-02-04T13:50:19.000Z
|
2021-07-20T13:21:31.000Z
|
tests/test_pydeduplines.py
|
Intsights/PyDeduplines
|
f8e38b2ce135469a670d8600e75d3f61447807f3
|
[
"MIT"
] | null | null | null |
tests/test_pydeduplines.py
|
Intsights/PyDeduplines
|
f8e38b2ce135469a670d8600e75d3f61447807f3
|
[
"MIT"
] | 3
|
2020-04-05T03:43:48.000Z
|
2021-07-20T13:21:44.000Z
|
import tempfile
import contextlib
import pytest
import random
import pydeduplines
@pytest.mark.parametrize(
    'number_of_threads',
    [
        0,
        1,
        2,
    ]
)
@pytest.mark.parametrize(
    'number_of_splits',
    [
        1,
        2,
    ]
)
def test_compute_unique_lines_one_file(
    number_of_threads,
    number_of_splits,
):
    """Deduplicating one file that contains every line twice yields each line exactly once."""
    with contextlib.ExitStack() as stack:
        test_input_file_one = stack.enter_context(
            tempfile.NamedTemporaryFile('wb')
        )
        test_output_file = stack.enter_context(
            tempfile.NamedTemporaryFile('rb')
        )
        lines = [
            f'line{i}'.encode()
            for i in range(11000)
        ]
        random.shuffle(lines)
        test_input_file_one.file.write(b'\n'.join(lines * 2))
        test_input_file_one.file.flush()
        # Managed temporary working directory: removed on exit.
        # (tempfile.mkdtemp leaked a directory per test run.)
        tempdir = stack.enter_context(tempfile.TemporaryDirectory())
        pydeduplines.compute_unique_lines(
            working_directory=tempdir,
            file_paths=[
                test_input_file_one.name,
            ],
            output_file_path=test_output_file.name,
            number_of_splits=number_of_splits,
            number_of_threads=number_of_threads,
        )
        unique_file_data = test_output_file.read()
        assert sorted(unique_file_data.split(b'\n')[:-1]) == sorted(lines)
@pytest.mark.parametrize(
    'number_of_threads',
    [
        0,
        1,
        2,
    ]
)
@pytest.mark.parametrize(
    'number_of_splits',
    [
        1,
        2,
    ]
)
def test_compute_unique_lines_two_files(
    number_of_threads,
    number_of_splits,
):
    """Deduplicating two overlapping files yields the union of their lines, each once."""
    with contextlib.ExitStack() as stack:
        test_input_file_one = stack.enter_context(
            tempfile.NamedTemporaryFile('wb')
        )
        test_input_file_two = stack.enter_context(
            tempfile.NamedTemporaryFile('wb')
        )
        test_output_file = stack.enter_context(
            tempfile.NamedTemporaryFile('rb')
        )
        lines = [
            f'line{i}'.encode()
            for i in range(11000)
        ]
        random.shuffle(lines)
        test_input_file_one.file.write(b'\n'.join(lines[:10000]))
        test_input_file_one.file.flush()
        test_input_file_two.file.write(b'\n'.join(lines[:11000]))
        test_input_file_two.file.flush()
        # Managed temporary working directory: removed on exit.
        # (tempfile.mkdtemp leaked a directory per test run.)
        tempdir = stack.enter_context(tempfile.TemporaryDirectory())
        pydeduplines.compute_unique_lines(
            working_directory=tempdir,
            file_paths=[
                test_input_file_one.name,
                test_input_file_two.name,
            ],
            output_file_path=test_output_file.name,
            number_of_splits=number_of_splits,
            number_of_threads=number_of_threads,
        )
        unique_file_data = test_output_file.read()
        assert sorted(unique_file_data.split(b'\n')[:-1]) == sorted(lines)
@pytest.mark.parametrize(
    'number_of_threads',
    [
        0,
        1,
        2,
    ]
)
@pytest.mark.parametrize(
    'number_of_splits',
    [
        1,
        2,
    ]
)
def test_compute_added_lines(
    number_of_threads,
    number_of_splits,
):
    """compute_added_lines reports exactly the lines present in file two but not file one."""
    with contextlib.ExitStack() as stack:
        test_input_file_one = stack.enter_context(
            tempfile.NamedTemporaryFile('wb')
        )
        test_input_file_two = stack.enter_context(
            tempfile.NamedTemporaryFile('wb')
        )
        test_output_file = stack.enter_context(
            tempfile.NamedTemporaryFile('rb')
        )
        lines = [
            f'line{i}'.encode()
            for i in range(11000)
        ]
        random.shuffle(lines)
        test_input_file_one.file.write(b'\n'.join(lines[:10000]))
        test_input_file_one.file.flush()
        test_input_file_two.file.write(b'\n'.join(lines[:11000]))
        test_input_file_two.file.flush()
        # Managed temporary working directory: removed on exit.
        # (tempfile.mkdtemp leaked a directory per test run.)
        tempdir = stack.enter_context(tempfile.TemporaryDirectory())
        pydeduplines.compute_added_lines(
            working_directory=tempdir,
            first_file_path=test_input_file_one.name,
            second_file_path=test_input_file_two.name,
            output_file_path=test_output_file.name,
            number_of_splits=number_of_splits,
            number_of_threads=number_of_threads,
        )
        added_lines_file_data = test_output_file.read()
        assert sorted(added_lines_file_data.split(b'\n')[:-1]) == sorted(lines[10000:])
| 25.298246
| 87
| 0.599168
| 493
| 4,326
| 4.888438
| 0.131846
| 0.079668
| 0.107884
| 0.079668
| 0.930705
| 0.915768
| 0.909129
| 0.909129
| 0.882158
| 0.882158
| 0
| 0.019504
| 0.30074
| 4,326
| 170
| 88
| 25.447059
| 0.77719
| 0
| 0
| 0.686667
| 0
| 0
| 0.035136
| 0
| 0
| 0
| 0
| 0
| 0.02
| 1
| 0.02
| false
| 0
| 0.033333
| 0
| 0.053333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
154e77b80713b849048590a71b23a6e7b21131f8
| 19,695
|
py
|
Python
|
MyFuncs.py
|
hkujy/BianLiShuang
|
d4f38310e166926dccffd096c04e6c288997c799
|
[
"MIT"
] | null | null | null |
MyFuncs.py
|
hkujy/BianLiShuang
|
d4f38310e166926dccffd096c04e6c288997c799
|
[
"MIT"
] | null | null | null |
MyFuncs.py
|
hkujy/BianLiShuang
|
d4f38310e166926dccffd096c04e6c288997c799
|
[
"MIT"
] | null | null | null |
"""
contain all the functions
"""
from os import times
import ParaScript as ps
import matplotlib.pyplot as plt
# TODO:
# 1. add fixed operation cost
# 2. add the effect of frequency
class OperatorClass(object):
    """
    A transport operator: holds the fare, discount, time, cost and patronage
    state used by the profit/cost calculations in this module.
    """
    def __init__(self, _id):
        # _id: operator identifier
        self.id = _id
        # fare charged per passenger
        self.price = 0.0
        # discount offered to passengers
        self.discount = 0.0
        # travel time
        self.time = 0.0
        # total operating cost (computed by cal_opcost)
        self.opcost = 0.0
        # profit (computation currently commented out below)
        self.profit = 0.0
        # fixed operating cost component
        self.fxcost = 0.0
        # number of passengers carried
        self.numpas = 0.0
        # self.distance = 0.0
    def set_price(self, _val):
        # Set the fare charged per passenger.
        self.price = _val
    def cal_opcost(self, _para: ps.ParaClass):
        # NOTE(review): annotation was `ps.ParaClass()`, which instantiated
        # ParaClass at definition time; annotate with the class itself.
        # Total cost = fixed cost + per-passenger cost * patronage.
        self.opcost = self.fxcost+_para.opCostPerPas*self.numpas
# def cal_profit(self, _para: ps.ParaClass()):
# if self.id == 1:
# self.profit = self.price*self.numpas-self.opcost
# if self.id == 2:
# self.profit = (self.price-self.discount)*self.numpas-self.opcost
# class UserClass(object):
# """
# class id
# 1: use company 1 + 3(without discount)
# 2: use company 1 + 2(with discount)
# """
# def __init__(self, _id):
# self.id = _id
# self.price = 0.0
# self.time = 0.0
# self.cost = 0.0
# def set_price(self, _val):
# self.price = _val
# # def set_dist(self, _val):
# # self.dist = _val
# def set_time(self, _val):
# self.time = _val
# def cal_cost(self, _para: ps.ParaClass(), _op):
# if self.id == 1:
# # self.cost = _op1.price+_op1.time+_para.b*_op1.dist+_op3.cost
# self.cost = _op[0].price+_para.val_of_time * _op[0].time+_para.val_of_time*_op[2].time
# elif self.id == 2:
# # self.cost = _op2.price+_op2.time+_para.b*_op2.dist-_para.la*_op2.discount_H
# self.cost = _op[1].price+_para.val_of_time * _op[1].time-_op[1].discount
# else:
# print("err in computing generalised cost")
def get_discont_val(t1, t2, _para):
    """
    Compute the discount offered by operator 2.

    The discount is proportional (factor ``_para.discount_ratio``) to the
    travel-time gap between the two companies; t1 and t2 are the travel
    times of company 1 and company 2 respectively.
    """
    gap = t1 - t2
    discount = _para.discount_ratio * gap
    if discount < 0:
        # Pause for manual inspection: op2 being slower than op1 is unexpected.
        input(
            "The travel time of company op2 is greater than op 1, need to further examine")
    return discount
def get_x(_para: ps.ParaClass, _op):
    """
    Compute the demands x1 and x2 of operators 1 and 2 from the closed-form
    solution and store them in _op[0].numpas and _op[1].numpas.
    """
    scale = 1 / (_para.g**2 - _para.m**2)
    # generalised cost pieces: price + value-of-time * travel time
    gc1 = _op[0].price + _para.val_of_time * _op[0].time
    gc2 = _op[1].price + _para.val_of_time * _op[1].time
    t3_cost = _para.val_of_time * _op[2].time
    term_a = gc1 - _para.a1 + t3_cost
    term_b = gc1 + gc2 - _para.a2 - _op[1].discount
    _op[0].numpas = scale * (_para.m * term_a - _para.g * term_b)
    _op[1].numpas = scale * (_para.m * term_b - _para.g * term_a)
    # fourth_bracket=1/2*(_para.g**2-_para.m**2)
    # fifth_bracket = (_para.μ*_op[4].distance+_para.val_of_time*_op[4].time - _para.a2-_op[4].discount*abs(_op[4].time -_op[3].time ))
    # sixth_bracket = (_para.μ*_op[3].distance+_para.val_of_time*_op[3].time+_op[2].time)
    # x4=fourth_bracket*( _para.m*fifth_bracket-_para.g *sixth_bracket)
    # x5=fourth_bracket*(_para.m *sixth_bracket-_para.g*fifth_bracket)
    # #_op[3].numpas = x4
    # #_op[4].numpas = x5
    # seventh_bracket=1/(_para.g**2-4*_para.m**2)*(_para.g**2-_para.m**2)
    # eighth_bracket =(_para.a1-_para.val_of_time*_op[5].time-_para.μ*_op[5].distance-_op[2].time)
    # ninth_bracket=(_para.a2-_para.val_of_time*_op[6].time-_para.μ*_op[6].distance+_op[6].discount*abs(_op[5].time -_op[6].time))
    # tenth_bracket=(_para.a1-_para.μ*_op[5].distance-_para.val_of_time*_op[5].time)
    # x6=seventh_bracket*(2*_para.m**3*eighth_bracket-_para.g*_para.m**2*ninth_bracket-_para.m*_para.g**2*tenth_bracket)
    # x7=seventh_bracket*(2*_para.m**3*(ninth_bracket+_para.val_of_time*_op[5].time)-_para.g*_para.m**2*(tenth_bracket-_op[2].time)-_para.m*_para.g**2* ninth_bracket)
    # _op[5].numpas = x6
    # _op[6].numpas = x7
def cal_profit(_p: ps.ParaClass, _op):
    """
    Compute the profit of the two operators and store it in _op[*].profit.

    Operator 2 earns the discounted fare from its own passengers; operator 1
    collects the full fare from both passenger groups.

    _p:  parameter set (unused here, kept for interface consistency)
    _op: list of OperatorClass-like objects; indices 0 and 1 are updated
    """
    _op[1].profit = (_op[1].price - _op[1].discount)*_op[1].numpas - _op[1].opcost
    # BUG FIX: operator 1's profit previously subtracted operator 2's
    # operation cost (_op[1].opcost); it must subtract its own cost.
    _op[0].profit = _op[0].price*(_op[1].numpas + _op[0].numpas) - _op[0].opcost
def update_costAndProfit(_p: ps.ParaClass, _op):
    """Refresh the operation costs of operators 1 and 2, then their profits."""
    # step 1: operation cost = fixed cost + per-passenger cost * demand
    for operator in _op[:2]:
        operator.opcost = operator.fxcost + _p.opCostPerPas * operator.numpas
    # step 2: profits follow from the freshly updated costs
    cal_profit(_p, _op)
def find_optimal_discount(dc, pf):
    """
    Return the (discount, profit) pair with the largest profit.

    dc: list of discount values
    pf: list of profit values, aligned element-wise with dc

    BUG FIX: the previous implementation initialised the running maximum to
    -9999, which silently returned the wrong entry whenever every profit was
    below -9999; the maximum is now taken directly over the profit list.
    Ties keep the first maximal entry, matching the original strict-greater
    comparison.
    """
    if len(dc) != len(pf):
        print("Warning: the length of the input list do not equal")
        input("--------need to debug----------------")
    best = max(range(len(pf)), key=lambda i: pf[i])
    return dc[best], pf[best]
def test_one_ParaSet(case_id: int, _para: ps.ParaClass()):
    """
    Run one combination of parameters for the base (competition) case.

    Sweeps the discount ratio from 0.05 to 0.50 in 10 steps of 0.05; for
    each ratio it computes the discount value, the demands x1/x2, the
    operation costs and the profits, appends one row per ratio to
    TestResults.csv, and saves demand/profit figures whose file names
    embed case_id.

    case_id: test-case index used in the csv rows and figure file names
    _para:   parameter set (prices, travel times, fixed costs, m, g, ...);
             its discount_ratio attribute is mutated by the sweep
    """
    # per-discount-ratio result series
    x1_list = []
    x2_list = []
    total_demand = []
    discount = []
    op1_profit = []
    op2_profit = []
    op1_cost = []
    op2_cost = []
    operators = []
    operators.append(OperatorClass(_id=1))
    operators.append(OperatorClass(_id=2))
    operators.append(OperatorClass(_id=3))
    # price of the two companies
    operators[0].price = _para.price[0]
    operators[1].price = _para.price[1]
    # travel time of the two companies
    operators[0].time = _para.travel_time[0]
    operators[1].time = _para.travel_time[1]
    # travel time of the third company
    operators[2].time = _para.travel_time[2]
    #operators[3].time = _para.travel_time[3]
    # operators[4].time = _para.travel_time[4]
    #operators[5].time = _para.travel_time[5]
    operators[0].fxcost = _para.fxcost[0]
    operators[1].fxcost = _para.fxcost[1]
    for i in range(0, 10):
        # sweep the discount ratio: 0.05, 0.10, ..., 0.50
        _para.discount_ratio = 0.05*(i+1)
        operators[1].discount = get_discont_val(
            operators[0].time, operators[1].time, _para)
        discount_val= get_discont_val(
            operators[0].time, operators[1].time, _para)
        discount.append(discount_val)
        if operators[1].price - operators[1].discount < 0:
            # pause for manual inspection if the discounted fare is negative
            print("error: the op2 price after discout is negative")
            input()
        get_x(_para, operators)
        # print("{0},{1}".format(operators[0].numpas,operators[1].numpas))
        update_costAndProfit(_para, operators)
        x1_list.append(operators[0].numpas)
        x2_list.append(operators[1].numpas)
        total_demand.append(operators[0].numpas + operators[1].numpas)
        op1_profit.append(operators[0].profit)
        op2_profit.append(operators[1].profit)
        op1_cost.append(operators[0].opcost)
        op2_cost.append(operators[1].opcost)
    # append one csv row per tested discount ratio
    for i in range(0,len(x1_list)):
        with open('TestResults.csv', 'a') as f:
            # print("TestId,Price1,Price2,Time1,Time2,Time3,DiscountRatio,m,g,x1,x2,profit1,profit2,opCost1,opCost2",file=f)
            print("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14}".format
                  (case_id,_para.price[0],_para.price[1],_para.travel_time[0],_para.travel_time[1],_para.travel_time[2],
                   discount[i],_para.m,_para.g,x1_list[i],x2_list[i],op1_profit[i],op2_profit[i],
                   op1_cost[i],op2_cost[i]),file=f)
    opt_disc, opt_profit = find_optimal_discount(discount,op2_profit)
    print("Optimal Discount = {0}, Optimal Profit = {1}".format(opt_disc, opt_profit))
    # plt.plot(op2_profit)
    # plt.ion()
    # plt.pause(2)
    # plt.close()
    # plt.plot(op1_profit)
    # plt.ion()
    # plt.pause(2)
    # plt.close()
    # figure 1: demand curves of both operators vs discount value
    plt.plot(x1_list, label="x1")
    plt.plot(x2_list, label="x2")
    # plt.plot(total_demand, label="total")
    # plt.title("Demand")
    xtick = plt.gca().get_xticks()
    ytick = plt.gca().get_yticks()
    # NOTE(review): the x-tick positions fetched above are immediately
    # replaced by the discount values, so the labels show discounts
    xtick = discount
    plt.gca().set_xticklabels(xtick, fontsize=10,fontname='Times New Roman')
    plt.gca().set_yticklabels(ytick, fontsize=10,fontname='Times New Roman')
    xmajorFormatter = plt.FormatStrFormatter('%.1f')
    plt.gca().xaxis.set_major_formatter(xmajorFormatter)
    plt.gca().set_xlabel("Discount Value",fontsize=12,fontname='Times New Roman')
    plt.legend()
    plt.ion()
    plt.pause(1)
    plt.tight_layout()
    plt.savefig("Base_Demand_Case_"+str(case_id)+".png",bbox_inches='tight',dpi=600)
    plt.close()
    # figure 2: profit curves of both operators
    plt.plot(op1_profit, label='op1')
    plt.plot(op2_profit, label='op2')
    plt.title("profit", fontsize = 12, fontname ='Times New Roman')
    xtick = plt.gca().get_xticks()
    ytick = plt.gca().get_yticks()
    xtick = discount
    plt.gca().set_xticklabels(xtick, fontsize=10,fontname='Times New Roman')
    plt.gca().set_yticklabels(ytick, fontsize=10,fontname='Times New Roman')
    xmajorFormatter = plt.FormatStrFormatter('%.1f')
    plt.gca().xaxis.set_major_formatter(xmajorFormatter)
    plt.gca().set_xlabel("Discount Value",fontsize=12,fontname='Times New Roman')
    plt.legend()
    plt.ion()
    plt.pause(1)
    plt.tight_layout()
    plt.savefig("Base_Profit_Case_"+str(case_id)+".png",bbox_inches='tight',dpi=600)
    plt.close()
    # plot op 2
    plt.plot(op2_profit)
    plt.title("Operator 2 Profit",fontsize=12, fontname='Times New Roman')
    xtick = plt.gca().get_xticks()
    ytick = plt.gca().get_yticks()
    xtick = discount
    plt.gca().set_xticklabels(xtick, fontsize=10,fontname='Times New Roman')
    plt.gca().set_yticklabels(ytick, fontsize=10,fontname='Times New Roman')
    xmajorFormatter = plt.FormatStrFormatter('%.1f')
    plt.gca().xaxis.set_major_formatter(xmajorFormatter)
    plt.gca().set_xlabel("Discount Value",fontsize=12,fontname='Times New Roman')
    plt.gca().set_ylabel("Profit",fontsize=12,fontname='Times New Roman')
    plt.ion()
    plt.pause(1)
    plt.savefig("Base_Profit_Op2_Case_"+str(case_id)+".png",bbox_inches='tight',dpi=600)
    plt.close()
    # plot op1
    plt.plot(op1_profit)
    xtick = plt.gca().get_xticks()
    ytick = plt.gca().get_yticks()
    xtick = discount
    plt.gca().set_xticklabels(xtick, fontsize=10,fontname='Times New Roman')
    plt.gca().set_yticklabels(ytick, fontsize=10,fontname='Times New Roman')
    xmajorFormatter = plt.FormatStrFormatter('%.1f')
    plt.gca().xaxis.set_major_formatter(xmajorFormatter)
    plt.gca().set_xlabel("Discount Value",fontsize=12,fontname='Times New Roman')
    plt.gca().set_ylabel("Profit",fontsize=12,fontname='Times New Roman')
    plt.title("Operator 1 Profit",fontsize =12, fontname ='Times New Roman')
    plt.ion()
    plt.pause(1)
    plt.savefig("Base_Profit_Op1_Case_"+str(case_id)+".png",bbox_inches='tight',dpi=600)
    plt.close()
    # step 3: plot
def get_x_share_mon(_para: ps.ParaClass, _op):
    """
    Demands x1 and x2 of operators 1 and 2 under the shared-monopoly
    solution; results are stored in _op[0].numpas and _op[1].numpas.
    """
    denom = 1 / (2 * (_para.g**2 - _para.m**2))
    # NOTE(review): _op[2].time is not weighted by val_of_time here,
    # unlike in get_x -- confirm this asymmetry is intentional
    term1 = _para.val_of_time * _op[0].time - _para.a1 - _op[2].time
    term2 = _para.val_of_time * _op[1].time - _para.a2 - _op[1].discount
    _op[0].numpas = denom * (_para.m * term1 - _para.g * term2)
    _op[1].numpas = denom * (_para.m * term2 - _para.g * term1)
def get_price_share_mon(_para: ps.ParaClass, _op):
    """
    Optimal prices of operators 1 and 2 under the shared-monopoly solution,
    written back to _op[0].price and _op[1].price.
    """
    net1 = _para.a1 - _para.val_of_time*_op[0].time - _op[2].time
    net2 = _para.a2 - _para.val_of_time*_op[1].time + _op[1].discount
    _op[0].price = 0.5 * net1
    _op[1].price = 0.5 * net2
def test_one_share(case_id: int, _para: ps.ParaClass()):
    """
    Run one combination of parameters for the shared-monopoly case.

    Sweeps the discount ratio from 0.05 to 0.75 in 15 steps of 0.05; for
    each ratio it computes the discount value, the shared-monopoly demands
    and prices, the costs and profits, saves demand/profit figures named
    after case_id, and appends one row per ratio to TestResults.csv.

    case_id: test-case index used in the csv rows and figure file names
    _para:   parameter set; its discount_ratio attribute is mutated
    """
    x1_list = []
    x2_list = []
    total_demand = []
    discount = []
    op1_profit = []
    op2_profit = []
    total_profit = []
    op1_cost = []
    op2_cost = []
    operators = []
    operators.append(OperatorClass(_id=1))
    operators.append(OperatorClass(_id=2))
    operators.append(OperatorClass(_id=3))
    # price of the two companies
    operators[0].price = _para.price[0]
    operators[1].price = _para.price[1]
    # travel time of the two companies
    operators[0].time = _para.travel_time[0]
    operators[1].time = _para.travel_time[1]
    # travel time of the third company
    operators[2].time = _para.travel_time[2]
    #operators[3].time = _para.travel_time[3]
    # operators[4].time = _para.travel_time[4]
    #operators[5].time = _para.travel_time[5]
    operators[0].fxcost = _para.fxcost[0]
    operators[1].fxcost = _para.fxcost[1]
    for i in range(0, 15):
        # sweep the discount ratio: 0.05, 0.10, ..., 0.75
        _para.discount_ratio = 0.05*(i+1)
        operators[1].discount = get_discont_val(
            operators[0].time, operators[1].time, _para)
        discount_val= get_discont_val(
            operators[0].time, operators[1].time, _para)
        discount.append(discount_val)
        if operators[1].price - operators[1].discount < 0:
            print("error: the op2 price after discout is negative")
            input()
        # get_x(_para, operators)
        # NOTE(review): demands are computed *before* the prices are updated
        # below, so x1/x2 use the previous iteration's prices -- confirm
        # this ordering is intended
        get_x_share_mon(_para, operators)
        # print("price {0},{1}".format(operators[0].price,operators[1].price))
        get_price_share_mon(_para, operators)
        # print("price {0},{1}".format(operators[0].price,operators[1].price))
        update_costAndProfit(_para, operators)
        # print("{0},{1}".format(operators[0].numpas,operators[1].numpas))
        # update_costAndProfit(_para, operators)
        x1_list.append(operators[0].numpas)
        x2_list.append(operators[1].numpas)
        # total_demand.append(operators[0].numpas + operators[1].numpas)
        op1_profit.append(operators[0].profit)
        op2_profit.append(operators[1].profit)
        total_profit.append(operators[0].profit+operators[1].profit)
        op1_cost.append(operators[0].opcost)
        op2_cost.append(operators[1].opcost)
    # demand figure
    plt.plot(x1_list, label="x1")
    plt.plot(x2_list, label="x2")
    # plt.plot(total_demand, label="total")
    plt.title("Demand")
    plt.legend()
    plt.ion()
    plt.pause(1)
    plt.tight_layout()
    plt.savefig("ShareMono_Demand_Case_"+str(case_id)+".png",bbox_inches='tight',dpi=600)
    plt.close()
    # per-operator and total profit figures
    plt.plot(op2_profit)
    plt.title("Op2 Profit")
    plt.ion()
    plt.pause(1)
    plt.savefig("ShareMono_Profit_Op2_Case_"+str(case_id)+".png",bbox_inches='tight',dpi=600)
    plt.close()
    plt.plot(op1_profit)
    plt.title("Op1 Profit")
    plt.ion()
    plt.pause(1)
    plt.savefig("ShareMono_Profit_Op1_Case_"+str(case_id)+".png",bbox_inches='tight',dpi=600)
    plt.close()
    plt.plot(total_profit)
    plt.title("TotalProfit")
    plt.ion()
    plt.pause(1)
    plt.savefig("ShareMono_Profit_Total_Case_"+str(case_id)+".png",bbox_inches='tight',dpi=600)
    plt.close()
    # csv rows -- NOTE(review): the DiscountRatio column holds the *final*
    # ratio of the sweep here (the loop finished before this point), not the
    # per-row discount value as in test_one_ParaSet
    for i in range(0,len(x1_list)):
        with open('TestResults.csv', 'a+') as f:
            print("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14}".format
                  (case_id,_para.price[0],_para.price[1],_para.travel_time[0],_para.travel_time[1],_para.travel_time[2],
                   _para.discount_ratio,_para.m,_para.g,x1_list[i],x2_list[i],op1_profit[i],op2_profit[i],
                   op1_cost[i],op2_cost[i]),file=f)
    opt_disc, opt_profit = find_optimal_discount(discount,total_profit)
    print("Optimal Discount = {0}, Optimal Profit = {1}".format(opt_disc, opt_profit))
#######################################################################
def get_price_Betran(_para: ps.ParaClass, _op):
    """
    Price formula for the Bertrand competition case.

    NOTE(review): not implemented yet -- the body is empty, so operator
    prices are left unchanged when this is called.
    """
def get_x_Betran(_para: ps.ParaClass, _op):
    """
    Demand (x) formula for the Bertrand competition case.

    NOTE(review): not implemented yet -- the body is empty, so operator
    demands are left unchanged when this is called.
    """
def test_one_Bertand(case_id: int, _para: ps.ParaClass()):
    """
    Run one combination of parameters for the Bertrand competition case.

    Sweeps the discount ratio from 0.05 to 0.75 in 15 steps of 0.05; for
    each ratio it computes the discount value, the Bertrand prices/demands,
    the costs and profits, saves demand/profit figures named after case_id,
    and appends one row per ratio to TestResults.csv.

    NOTE(review): get_price_Betran and get_x_Betran are still empty stubs,
    so demands and profits currently stay at their initial values.

    case_id: test-case index used in the csv rows and figure file names
    _para:   parameter set; its discount_ratio attribute is mutated
    """
    x1_list = []
    x2_list = []
    total_demand = []
    discount = []
    op1_profit = []
    op2_profit = []
    total_profit = []
    op1_cost = []
    op2_cost = []
    operators = []
    operators.append(OperatorClass(_id=1))
    operators.append(OperatorClass(_id=2))
    operators.append(OperatorClass(_id=3))
    # price of the two companies
    operators[0].price = _para.price[0]
    operators[1].price = _para.price[1]
    # travel time of the two companies
    operators[0].time = _para.travel_time[0]
    operators[1].time = _para.travel_time[1]
    # travel time of the third company
    operators[2].time = _para.travel_time[2]
    #operators[3].time = _para.travel_time[3]
    # operators[4].time = _para.travel_time[4]
    #operators[5].time = _para.travel_time[5]
    operators[0].fxcost = _para.fxcost[0]
    operators[1].fxcost = _para.fxcost[1]
    for i in range(0, 15):
        # sweep the discount ratio: 0.05, 0.10, ..., 0.75
        _para.discount_ratio = 0.05*(i+1)
        operators[1].discount = get_discont_val(
            operators[0].time, operators[1].time, _para)
        discount_val= get_discont_val(
            operators[0].time, operators[1].time, _para)
        discount.append(discount_val)
        if operators[1].price - operators[1].discount < 0:
            print("error: the op2 price after discout is negative")
            input()
        get_price_Betran(_para, operators)
        get_x_Betran(_para, operators)
        update_costAndProfit(_para, operators)
        # print("{0},{1}".format(operators[0].numpas,operators[1].numpas))
        # update_costAndProfit(_para, operators)
        x1_list.append(operators[0].numpas)
        x2_list.append(operators[1].numpas)
        # total_demand.append(operators[0].numpas + operators[1].numpas)
        op1_profit.append(operators[0].profit)
        op2_profit.append(operators[1].profit)
        total_profit.append(operators[0].profit+operators[1].profit)
        # BUG FIX: these two appends were commented out while the csv loop
        # below still indexes op1_cost[i] / op2_cost[i], which raised
        # IndexError on the first row; restore them (matching the sibling
        # test_one_ParaSet / test_one_share functions).
        op1_cost.append(operators[0].opcost)
        op2_cost.append(operators[1].opcost)
    plt.plot(x1_list, label="x1")
    plt.plot(x2_list, label="x2")
    # plt.plot(total_demand, label="total")
    plt.title("Demand")
    plt.legend()
    plt.ion()
    plt.pause(1)
    plt.tight_layout()
    plt.savefig("Bert_Demand_Case_"+str(case_id)+".png",bbox_inches='tight',dpi=600)
    plt.close()
    plt.plot(op2_profit)
    plt.title("Op2 Profit")
    plt.ion()
    plt.pause(1)
    plt.savefig("Bert_Profit_Op2_Case_"+str(case_id)+".png",bbox_inches='tight',dpi=600)
    plt.close()
    plt.plot(op1_profit)
    plt.title("Op1 Profit")
    plt.ion()
    plt.pause(1)
    plt.savefig("Bert_Profit_Op1_Case_"+str(case_id)+".png",bbox_inches='tight',dpi=600)
    plt.close()
    plt.plot(total_profit)
    plt.title("TotalProfit")
    plt.ion()
    plt.pause(1)
    plt.savefig("Bert_Profit_Total_Case_"+str(case_id)+".png",bbox_inches='tight',dpi=600)
    plt.close()
    for i in range(0,len(x1_list)):
        with open('TestResults.csv', 'a+') as f:
            # print("TestId,Price1,Price2,Time1,Time2,Time3,DiscountRatio,m,g,x1,x2,profit1,profit2,opCost1,opCost2",file=f)
            print("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14}".format
                  (case_id,_para.price[0],_para.price[1],_para.travel_time[0],_para.travel_time[1],_para.travel_time[2],
                   _para.discount_ratio,_para.m,_para.g,x1_list[i],x2_list[i],op1_profit[i],op2_profit[i],
                   op1_cost[i],op2_cost[i]),file=f)
    # opt_disc, opt_profit = find_optimal_discount(discount,op2_profit)
    # print("Optimal Discount = {0}, Optimal Profit = {1}".format(opt_disc, opt_profit))
| 37.020677
| 165
| 0.635491
| 2,880
| 19,695
| 4.101389
| 0.082639
| 0.036404
| 0.032001
| 0.023112
| 0.797833
| 0.773789
| 0.742465
| 0.731375
| 0.717406
| 0.711649
| 0
| 0.041482
| 0.198274
| 19,695
| 532
| 166
| 37.020677
| 0.706586
| 0.241787
| 0
| 0.717391
| 0
| 0.009317
| 0.100975
| 0.028988
| 0
| 0
| 0
| 0.00188
| 0
| 1
| 0.046584
| false
| 0
| 0.009317
| 0
| 0.065217
| 0.02795
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
157c5fcc329a8ef19b0629f912b4f2ac69932eec
| 218
|
py
|
Python
|
pyshorteners/exceptions.py
|
relrod/pyshorteners
|
f6a4a98db77ce7858c4b2a2999cd89dba3b4904d
|
[
"MIT"
] | 1
|
2021-03-24T11:54:30.000Z
|
2021-03-24T11:54:30.000Z
|
pyshorteners/exceptions.py
|
gauravssnl/pyshorteners
|
f6a4a98db77ce7858c4b2a2999cd89dba3b4904d
|
[
"MIT"
] | null | null | null |
pyshorteners/exceptions.py
|
gauravssnl/pyshorteners
|
f6a4a98db77ce7858c4b2a2999cd89dba3b4904d
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import unicode_literals
class UnknownShortenerException(Exception):
    """Exception for an unknown/unrecognised shortener service."""
    pass
class ShorteningErrorException(Exception):
    """Exception for an error while shortening a URL."""
    pass
class ExpandingErrorException(Exception):
    """Exception for an error while expanding a short URL."""
    pass
| 14.533333
| 43
| 0.788991
| 20
| 218
| 8.35
| 0.7
| 0.233533
| 0.215569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005435
| 0.155963
| 218
| 14
| 44
| 15.571429
| 0.902174
| 0.059633
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.428571
| 0.142857
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
1590222b2c3112ad594f550e10739fcb8d63d5a6
| 70
|
py
|
Python
|
tests/test_cgnswrap.py
|
chiao45/cgns_wrapper
|
b46acbd0e2ee2eb83cf5454190f03786a7efe5f0
|
[
"MIT"
] | null | null | null |
tests/test_cgnswrap.py
|
chiao45/cgns_wrapper
|
b46acbd0e2ee2eb83cf5454190f03786a7efe5f0
|
[
"MIT"
] | null | null | null |
tests/test_cgnswrap.py
|
chiao45/cgns_wrapper
|
b46acbd0e2ee2eb83cf5454190f03786a7efe5f0
|
[
"MIT"
] | null | null | null |
import cgns_wrapper
def test_pycgns():
    """Smoke test: delegate to cgns_wrapper.run_tests()."""
    cgns_wrapper.run_tests()
| 11.666667
| 28
| 0.757143
| 10
| 70
| 4.9
| 0.8
| 0.44898
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157143
| 70
| 5
| 29
| 14
| 0.830508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
1591b86b1d9335949b37b9a3f171b015caecab5f
| 89
|
py
|
Python
|
mlib/fig/fig_templates.py
|
mgroth0/mlib
|
0442ed51eab417b6972f885605afd351892a3a9a
|
[
"MIT"
] | 1
|
2020-06-16T17:26:45.000Z
|
2020-06-16T17:26:45.000Z
|
mlib/fig/fig_templates.py
|
mgroth0/mlib
|
0442ed51eab417b6972f885605afd351892a3a9a
|
[
"MIT"
] | null | null | null |
mlib/fig/fig_templates.py
|
mgroth0/mlib
|
0442ed51eab417b6972f885605afd351892a3a9a
|
[
"MIT"
] | null | null | null |
from mlib.boot.stream import arr
from mlib.fig.PlotData import DoubleBarOrBox, PlotData
| 22.25
| 54
| 0.831461
| 13
| 89
| 5.692308
| 0.692308
| 0.216216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11236
| 89
| 3
| 55
| 29.666667
| 0.936709
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
159e8d6b46dcc047bd34ac27ed68583d41ace03a
| 41
|
py
|
Python
|
doctorf/serializers.py
|
ninemoreminutes/doctorf
|
4fff5bca82001fdf0a22db62f4361b8c1b3b5c5c
|
[
"BSD-3-Clause"
] | 1
|
2019-11-13T16:01:12.000Z
|
2019-11-13T16:01:12.000Z
|
doctorf/serializers.py
|
ninemoreminutes/doctorf
|
4fff5bca82001fdf0a22db62f4361b8c1b3b5c5c
|
[
"BSD-3-Clause"
] | 3
|
2020-06-05T17:16:16.000Z
|
2021-06-10T18:28:39.000Z
|
doctorf/serializers.py
|
ninemoreminutes/doctorf
|
4fff5bca82001fdf0a22db62f4361b8c1b3b5c5c
|
[
"BSD-3-Clause"
] | null | null | null |
# Doctor F
from .fields import * # noqa
| 13.666667
| 29
| 0.658537
| 6
| 41
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.243902
| 41
| 2
| 30
| 20.5
| 0.870968
| 0.317073
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
15bddd053bde59e8741300bb740d97b182241f08
| 160
|
py
|
Python
|
plotly/graph_objs/parcats/line/colorbar/__init__.py
|
mprostock/plotly.py
|
3471c3dfbf783927c203c676422260586514b341
|
[
"MIT"
] | 12
|
2020-04-18T18:10:22.000Z
|
2021-12-06T10:11:15.000Z
|
plotly/graph_objs/parcats/line/colorbar/__init__.py
|
Vesauza/plotly.py
|
e53e626d59495d440341751f60aeff73ff365c28
|
[
"MIT"
] | 27
|
2020-04-28T21:23:12.000Z
|
2021-06-25T15:36:38.000Z
|
plotly/graph_objs/parcats/line/colorbar/__init__.py
|
Vesauza/plotly.py
|
e53e626d59495d440341751f60aeff73ff365c28
|
[
"MIT"
] | 6
|
2020-04-18T23:07:08.000Z
|
2021-11-18T07:53:06.000Z
|
from ._title import Title
from plotly.graph_objs.parcats.line.colorbar import title
from ._tickformatstop import Tickformatstop
from ._tickfont import Tickfont
| 32
| 57
| 0.85625
| 21
| 160
| 6.333333
| 0.52381
| 0.165414
| 0.225564
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 160
| 4
| 58
| 40
| 0.923611
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ec74806384d9b1e143407d7488c5ab24058fbb2b
| 6,755
|
py
|
Python
|
networkx_plotting.py
|
SYRROCA/SYRROCAIMS
|
6ac24f17444ed0a0b973748756ac6c7b3a2b138f
|
[
"BSD-3-Clause"
] | null | null | null |
networkx_plotting.py
|
SYRROCA/SYRROCAIMS
|
6ac24f17444ed0a0b973748756ac6c7b3a2b138f
|
[
"BSD-3-Clause"
] | null | null | null |
networkx_plotting.py
|
SYRROCA/SYRROCAIMS
|
6ac24f17444ed0a0b973748756ac6c7b3a2b138f
|
[
"BSD-3-Clause"
] | null | null | null |
# Software Name : SYRROCA
# Version: 1.0
# SPDX-FileCopyrightText: Copyright (c) 2021 Orange
# SPDX-License-Identifier: BSD-3-Clause
#
# This software is distributed under the BSD 3-Clause "New" or "Revised" License,
# the text of which is available at https://spdx.org/licenses/BSD-3-Clause.html
# or see the "license.txt" file for more details.
#
# Author: Alessio Diamanti
import networkx as nx
import matplotlib.pyplot as plt
# Generate the new state label. Returns the current label and updates silently dictMapping
def generate_label(node_name, dictMapping):
    """
    Return the short state label ('S0', 'S1', ...) for node_name.

    On first sight of a node a new label is created and silently recorded
    in dictMapping. The 'Nominal' node never receives a generated label, so
    it must already be present in dictMapping when looked up.
    """
    unseen = node_name not in dictMapping
    if unseen and node_name != 'Nominal':
        dictMapping[node_name] = 'S' + str(len(dictMapping))
    return dictMapping[node_name]
#
def draw_networx_graph(G, graph,fname,to_div,sign,prog,pad,pad_leg,dictMapping):
    """
    Plots network graph with networkx and saves it to <fname>.png.

    Keyword arguments:
    G            The graph to plot
    graph        Pydot graph to extract metadata (xlabel, penwidth) from
    fname        File name (without extension) to save to
    to_div       Quotient to compute percentage/per-mille
    sign         "‰" or "%"
    prog         Graphviz program to compute node position
    pad          Offset from node center to position node name and frequency inside node circle
    pad_leg      Offset to position the legend
    dictMapping  Dictionary with labels mapping (updated in place and returned)
    """
    nodesG_old = G.nodes
    # display names: double underscores become ' & '
    nodesG = [node.replace('__', ' & ') for node in G.nodes]
    mappingN = dict(zip([node for node in G.nodes], nodesG))
    G = nx.relabel_nodes(G, mappingN)
    plt.figure(figsize=(9.6, 5.952))
    pos = nx.nx_agraph.graphviz_layout(G, prog=prog)
    # label positions shifted above (pos3*) and below (pos2) node centers
    pos3Nom = {'Nominal': (pos['Nominal'][0], pos['Nominal'][1] + pad)}
    pos2 = {elem: (pos[elem][0], pos[elem][1] - pad) for idx, elem in enumerate(pos)}
    pos3 = {elem: (pos[elem][0], pos[elem][1] + pad) for idx, elem in enumerate(pos) if idx != 0}
    node_labels = [generate_label(node,dictMapping) for node in list(G.nodes)]
    mappingNom = {nodesG[0]: node_labels[0]}
    mapping = dict(zip(nodesG[1:], node_labels[1:]))
    # per-node frequency strings (xlabel scaled by to_div, suffixed by sign)
    mappingXL = dict(
        zip(nodesG,
            [str(round(n.get('xlabel') / to_div, 2)) + sign for n in graph.get_nodes() if n.get_name() in nodesG_old]))
    # edge widths come from the pydot graph's penwidth attributes
    weights = []
    for e in list(G.edges):
        src = e[0]
        dst = e[1]
        to_append = graph.get_edge(src.replace(' & ', '__'), dst.replace(' & ', '__'))[0].get_penwidth()
        print(src,dst, to_append)
        weights.append(to_append)
    print(weights)
    for idx, n in enumerate(G.nodes):
        if idx == 0:
            # first node: drawn with its plain display name
            nx.draw_networkx_nodes(G, pos=pos, nodelist=[n], label=dictMapping[n] + ' ' + n.replace('__', ' & '),
                                   node_size=2200, node_color='#FFFFFF', edgecolors='#000000')
        else:
            # other nodes: upper-cased, with NETWORK/MEMORY abbreviated
            nx.draw_networkx_nodes(G, pos=pos, nodelist=[n], label=dictMapping[n] + ' ' +
                                   n.replace('__', ' & ').upper().replace('NETWORK','NET').replace('MEMORY', 'MEM'),
                                   node_size=2200, node_color='#FFFFFF',edgecolors='#000000')
    # NOTE(review): the first argument here is a node / node list rather than
    # the graph itself -- unusual for draw_networkx_labels, confirm intended
    nx.draw_networkx_labels(list(G.nodes)[0], pos3Nom, mappingNom, font_size=14, font_color='#008000')
    nx.draw_networkx_labels(list(G.nodes)[:1], pos3, mapping, font_size=14, font_color='#FF0000')
    nx.draw_networkx_labels(G, pos2, mappingXL, font_size=11)
    nx.draw_networkx_edges(G, pos=pos, width=weights, connectionstyle='arc3, rad = 0.2', min_target_margin=26,
                           label=weights, min_source_margin=10)
    lgd = plt.legend(bbox_to_anchor=(0.5, pad_leg), loc='lower center', prop=dict(weight='bold', size=9), fontsize=10,
                     handlelength=0, handletextpad=0, fancybox=True, ncol=2)
    # hide the legend handles, keeping only the text entries
    for item in lgd.legendHandles:
        item.set_visible(False)
    plt.savefig(fname + ".png", format="PNG", dpi=600, bbox_extra_artists=[lgd], bbox_inches='tight')
    return dictMapping
# Plots network graph with networkx. Just a commodity method to plot bigger circle for the training (long per-mille string)
def draw_networx_graph_train(G, graph,fname,to_div,sign,prog,pad,pad_leg,dictMapping):
    """
    Commodity variant of draw_networx_graph used for the training phase:
    same pipeline, but the first node's circle is bigger (node_size=3000)
    to fit the longer per-mille string. Arguments are identical to
    draw_networx_graph; dictMapping is updated in place and returned.
    """
    nodesG_old = G.nodes
    # display names: double underscores become ' & '
    nodesG = [node.replace('__', ' & ') for node in G.nodes]
    mappingN = dict(zip([node for node in G.nodes], nodesG))
    G = nx.relabel_nodes(G, mappingN)
    plt.figure(figsize=(9.6,5.952))
    pos = nx.nx_agraph.graphviz_layout(G, prog=prog) # nx.spring_layout(G)
    # label positions shifted above (pos3*) and below (pos2) node centers
    pos3Nom = {'Nominal':(pos['Nominal'][0], pos['Nominal'][1] + pad)}
    pos2 = {elem: (pos[elem][0], pos[elem][1] - pad) for idx,elem in enumerate(pos)}
    pos3 = {elem: (pos[elem][0], pos[elem][1] + pad) for idx,elem in enumerate(pos) if idx != 0}
    node_labels = [generate_label(node,dictMapping) for node in list(G.nodes)]
    mappingNom = {nodesG[0]: node_labels[0]}
    mapping = dict(zip(nodesG[1:], node_labels[1:]))
    # per-node frequency strings (xlabel scaled by to_div, suffixed by sign)
    mappingXL = dict(
        zip(nodesG, [str(round(n.get('xlabel')/to_div,2))+sign for n in graph.get_nodes() if n.get_name() in nodesG_old ]))
    # edge widths come from the pydot graph's penwidth attributes
    weights = []
    for e in list(G.edges):
        src = e[0]
        dst = e[1]
        weights.append(graph.get_edge(src.replace(' & ', '__'), dst.replace(' & ', '__'))[0].get_penwidth())
    for idx, n in enumerate(G.nodes):
        if idx == 0:
            # first node: bigger circle for the longer label string
            nx.draw_networkx_nodes(G, pos=pos, nodelist=[n], label=dictMapping[n] + ' ' + n.replace('__', ' & '),
                                   node_size=3000, node_color='#FFFFFF', edgecolors='#000000')
        else:
            nx.draw_networkx_nodes(G, pos=pos, nodelist=[n], label=dictMapping[n]+' '+
                                   n.replace('__', ' & ').upper().replace('NETWORK','NET').replace('MEMORY','MEM'),
                                   node_size=2200, node_color='#FFFFFF', edgecolors='#000000' )
    # NOTE(review): the first argument here is a node / node list rather than
    # the graph itself -- unusual for draw_networkx_labels, confirm intended
    nx.draw_networkx_labels(list(G.nodes)[0], pos3Nom, mappingNom, font_size=14, font_color='#008000')
    nx.draw_networkx_labels(list(G.nodes)[:1], pos3, mapping, font_size=14, font_color='#FF0000')
    nx.draw_networkx_labels(G, pos2, mappingXL, font_size=11)
    nx.draw_networkx_edges(G, pos=pos, width=weights, connectionstyle='arc3, rad = 0.2',min_target_margin = 26,label=weights,min_source_margin=10 )
    lgd = plt.legend(bbox_to_anchor=(0.5, pad_leg), loc='lower center', prop=dict(weight='bold',size=9),fontsize=10,handlelength=0, handletextpad=0, fancybox=True,ncol=2)
    # hide the legend handles, keeping only the text entries
    for item in lgd.legendHandles:
        item.set_visible(False)
    plt.savefig(fname+".png", format="PNG", dpi=600, bbox_extra_artists=[lgd], bbox_inches='tight')
    return dictMapping
| 54.475806
| 171
| 0.623834
| 939
| 6,755
| 4.347178
| 0.232162
| 0.020578
| 0.041156
| 0.029397
| 0.733954
| 0.733954
| 0.719745
| 0.719745
| 0.718765
| 0.718765
| 0
| 0.034032
| 0.230052
| 6,755
| 123
| 172
| 54.918699
| 0.750625
| 0.039674
| 0
| 0.642857
| 0
| 0
| 0.059483
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.020408
| null | null | 0.020408
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ec8fb02287cad3ff9807d4f7b4a66b59e0088cb2
| 269
|
py
|
Python
|
pub_data_visualization/outages/plot/__init__.py
|
cre-os/pub-data-visualization
|
e5ec45e6397258646290836fc1a3b39ad69bf266
|
[
"MIT"
] | 10
|
2020-10-08T11:35:49.000Z
|
2021-01-22T16:47:59.000Z
|
pub_data_visualization/outages/plot/__init__.py
|
l-leo/pub-data-visualization
|
68eea00491424581b057495a7f0f69cf74e16e7d
|
[
"MIT"
] | 3
|
2021-03-15T14:26:43.000Z
|
2021-12-02T15:27:49.000Z
|
pub_data_visualization/outages/plot/__init__.py
|
cre-dev/pub-data-visualization
|
229bb7a543684be2cb06935299345ce3263da946
|
[
"MIT"
] | 1
|
2021-01-22T16:47:10.000Z
|
2021-01-22T16:47:10.000Z
|
"""
Module to plot outages data.
"""
from .animated_availability import *
from .evolution_mean_availability import *
from .expected_program import *
from .incremental_programs import *
from .regression_delays import *
| 22.416667
| 44
| 0.64684
| 26
| 269
| 6.461538
| 0.653846
| 0.238095
| 0.261905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.289963
| 269
| 12
| 45
| 22.416667
| 0.879581
| 0.104089
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ec913cde7ebc969c7ead7388f0570b9e7961d8b0
| 163
|
py
|
Python
|
sls/hover.py
|
jayvdb/sls
|
c788815898b3665cfe5b316b7780190cb9bdacb9
|
[
"Apache-2.0"
] | null | null | null |
sls/hover.py
|
jayvdb/sls
|
c788815898b3665cfe5b316b7780190cb9bdacb9
|
[
"Apache-2.0"
] | null | null | null |
sls/hover.py
|
jayvdb/sls
|
c788815898b3665cfe5b316b7780190cb9bdacb9
|
[
"Apache-2.0"
] | null | null | null |
class Hover():
    """
    Generate hover information
    """
    def hover(self, ws, doc, position):
        """Return hover data for *position* in *doc* within workspace *ws*.

        Currently always returns None (no hover content); the commented
        line shows the intended result shape.
        """
        # return {'contents': '.hover.'}
        return None
| 20.375
| 40
| 0.539877
| 16
| 163
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.306748
| 163
| 7
| 41
| 23.285714
| 0.778761
| 0.355828
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
eca07c20f9a9fc385ead5678578ba305b4bcd387
| 47
|
py
|
Python
|
tools/Polygraphy/polygraphy/backend/common/__init__.py
|
leo0519/TensorRT
|
498dcb009fe4c2dedbe9c61044d3de4f3c04a41b
|
[
"Apache-2.0"
] | 5,249
|
2019-06-17T17:20:34.000Z
|
2022-03-31T17:56:05.000Z
|
tools/Polygraphy/polygraphy/backend/common/__init__.py
|
leo0519/TensorRT
|
498dcb009fe4c2dedbe9c61044d3de4f3c04a41b
|
[
"Apache-2.0"
] | 1,721
|
2019-06-17T18:13:29.000Z
|
2022-03-31T16:09:53.000Z
|
tools/Polygraphy/polygraphy/backend/common/__init__.py
|
leo0519/TensorRT
|
498dcb009fe4c2dedbe9c61044d3de4f3c04a41b
|
[
"Apache-2.0"
] | 1,414
|
2019-06-18T04:01:17.000Z
|
2022-03-31T09:16:53.000Z
|
from polygraphy.backend.common.loader import *
| 23.5
| 46
| 0.829787
| 6
| 47
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
eca7ad2658211ea8bbb17e982650008c7bdac1eb
| 222
|
py
|
Python
|
eu.modelwriter.smtlib.texteditor/lib/z3-4.8.4/win/python/z3/__init__.py
|
ModelWriter/smtlib-tool
|
b075a8b6bf6188134a50f3884aad480d468fe558
|
[
"MIT"
] | null | null | null |
eu.modelwriter.smtlib.texteditor/lib/z3-4.8.4/win/python/z3/__init__.py
|
ModelWriter/smtlib-tool
|
b075a8b6bf6188134a50f3884aad480d468fe558
|
[
"MIT"
] | null | null | null |
eu.modelwriter.smtlib.texteditor/lib/z3-4.8.4/win/python/z3/__init__.py
|
ModelWriter/smtlib-tool
|
b075a8b6bf6188134a50f3884aad480d468fe558
|
[
"MIT"
] | null | null | null |
from .z3 import *
from . import z3num
from . import z3poly
from . import z3printer
from . import z3rcf
from . import z3types
from . import z3util
# generated files
from . import z3core
from . import z3consts
| 17.076923
| 24
| 0.711712
| 29
| 222
| 5.448276
| 0.448276
| 0.506329
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053254
| 0.238739
| 222
| 12
| 25
| 18.5
| 0.881657
| 0.067568
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.111111
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ece471d1c9cf27c090f794409cc8a9006870d54b
| 46
|
py
|
Python
|
helper2.py
|
bvt2nc/cs3240-labdemo
|
76cb93a98daf8b1934b6faaf1e641e2380235736
|
[
"MIT"
] | null | null | null |
helper2.py
|
bvt2nc/cs3240-labdemo
|
76cb93a98daf8b1934b6faaf1e641e2380235736
|
[
"MIT"
] | null | null | null |
helper2.py
|
bvt2nc/cs3240-labdemo
|
76cb93a98daf8b1934b6faaf1e641e2380235736
|
[
"MIT"
] | null | null | null |
def greeting2(msg):
print("Greeting2" + msg)
| 15.333333
| 25
| 0.695652
| 6
| 46
| 5.333333
| 0.666667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0.130435
| 46
| 2
| 26
| 23
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.195652
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
01c3c6e52fe02038072dd45f1b2049ef35bcd5c2
| 132
|
py
|
Python
|
pit_package/pit_poland/data_import/tests/__init__.py
|
Qertan/NYPD21Z
|
24ad8f22b6cdc6f424470d00e3528ca49c8fd213
|
[
"BSD-2-Clause"
] | 1
|
2022-02-22T15:15:27.000Z
|
2022-02-22T15:15:27.000Z
|
pit_package/pit_poland/data_import/tests/__init__.py
|
Qertan/NYPD21Z
|
24ad8f22b6cdc6f424470d00e3528ca49c8fd213
|
[
"BSD-2-Clause"
] | null | null | null |
pit_package/pit_poland/data_import/tests/__init__.py
|
Qertan/NYPD21Z
|
24ad8f22b6cdc6f424470d00e3528ca49c8fd213
|
[
"BSD-2-Clause"
] | null | null | null |
from .test_import import test_import_pit
from .test_import import test_import_ppl
__all__ = ("test_import_pit", "test_import_ppl")
| 26.4
| 48
| 0.825758
| 21
| 132
| 4.52381
| 0.285714
| 0.631579
| 0.294737
| 0.421053
| 0.631579
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098485
| 132
| 5
| 48
| 26.4
| 0.798319
| 0
| 0
| 0
| 0
| 0
| 0.225564
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
bf000ddd66d83e322076d509432d2ac5b070b155
| 59
|
py
|
Python
|
timepiece/contracts/tests/__init__.py
|
icekernel/django-timepiece
|
883cfcd50da3d1b411a43f3b6116342b49117ace
|
[
"MIT"
] | null | null | null |
timepiece/contracts/tests/__init__.py
|
icekernel/django-timepiece
|
883cfcd50da3d1b411a43f3b6116342b49117ace
|
[
"MIT"
] | null | null | null |
timepiece/contracts/tests/__init__.py
|
icekernel/django-timepiece
|
883cfcd50da3d1b411a43f3b6116342b49117ace
|
[
"MIT"
] | null | null | null |
from .test_contracts import *
from .test_invoices import *
| 19.666667
| 29
| 0.79661
| 8
| 59
| 5.625
| 0.625
| 0.355556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 59
| 2
| 30
| 29.5
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
171084c3f05d7cfffa6b9449c5c9e47128b4b78b
| 127
|
py
|
Python
|
src/von_connector/templatetags/index.py
|
tushar-on/permitify
|
b1caee9995aca0d9450d7d8acc3f9621c3128493
|
[
"Apache-2.0"
] | null | null | null |
src/von_connector/templatetags/index.py
|
tushar-on/permitify
|
b1caee9995aca0d9450d7d8acc3f9621c3128493
|
[
"Apache-2.0"
] | null | null | null |
src/von_connector/templatetags/index.py
|
tushar-on/permitify
|
b1caee9995aca0d9450d7d8acc3f9621c3128493
|
[
"Apache-2.0"
] | null | null | null |
from django.template.defaulttags import register
@register.filter
def index(sequence, position):
return sequence[position]
| 25.4
| 48
| 0.811024
| 15
| 127
| 6.866667
| 0.8
| 0.31068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110236
| 127
| 5
| 49
| 25.4
| 0.911504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
1735f7158b2470e7f9d26765325de83112b36033
| 8,552
|
py
|
Python
|
coba/tests/test_environments_definitions.py
|
VowpalWabbit/coba
|
f3ba37280ea6125dc334a501ba39b3d30696ef4b
|
[
"BSD-3-Clause"
] | 30
|
2020-08-06T22:17:34.000Z
|
2022-03-15T12:20:20.000Z
|
coba/tests/test_environments_definitions.py
|
VowpalWabbit/coba
|
f3ba37280ea6125dc334a501ba39b3d30696ef4b
|
[
"BSD-3-Clause"
] | 5
|
2021-02-25T02:06:22.000Z
|
2022-01-11T14:18:34.000Z
|
coba/tests/test_environments_definitions.py
|
VowpalWabbit/coba
|
f3ba37280ea6125dc334a501ba39b3d30696ef4b
|
[
"BSD-3-Clause"
] | 9
|
2020-11-25T19:55:44.000Z
|
2021-10-01T20:20:36.000Z
|
import json
import unittest
from coba.registry import CobaRegistry
from coba.exceptions import CobaException
from coba.environments.definitions import EnvironmentDefinitionFileV1
from coba.environments.primitives import SimulatedEnvironment
from coba.environments.openml import OpenmlSimulation
from coba.environments.filters import Take
class EnvironmentFileFmtV1_Tests(unittest.TestCase):
def setUp(self) -> None:
CobaRegistry.register("OpenmlSimulation", OpenmlSimulation)
CobaRegistry.register("Take", Take)
def test_one_environment(self):
json_txt = """{
"environments" : [
{ "OpenmlSimulation": 150 }
]
}"""
environments = EnvironmentDefinitionFileV1().filter(json.loads(json_txt))
self.assertIsInstance(environments[0], SimulatedEnvironment)
self.assertDictEqual({'openml':150, **environments[0].params}, environments[0].params)
def test_raw_environment(self):
json_txt = """{
"environments" : { "OpenmlSimulation": 150 }
}"""
environments = EnvironmentDefinitionFileV1().filter(json.loads(json_txt))
self.assertIsInstance(environments[0], SimulatedEnvironment)
self.assertDictEqual({'openml':150, **environments[0].params}, environments[0].params)
def test_one_environment_one_filter(self):
json_txt = """{
"environments" : [
[{ "OpenmlSimulation": 150 }, {"Take":10} ]
]
}"""
environments = EnvironmentDefinitionFileV1().filter(json.loads(json_txt))
self.assertIsInstance(environments[0], SimulatedEnvironment)
self.assertDictEqual({"openml":150, "take":10, **environments[0].params}, environments[0].params)
def test_one_environment_two_filters(self):
json_txt = """{
"environments" : [
[{ "OpenmlSimulation": 150 }, {"Take":[10,20], "method":"foreach"} ]
]
}"""
environments = EnvironmentDefinitionFileV1().filter(json.loads(json_txt))
self.assertEqual(2, len(environments))
self.assertIsInstance(environments[0], SimulatedEnvironment)
self.assertIsInstance(environments[1], SimulatedEnvironment)
self.assertDictEqual({"openml":150, "take":10, **environments[0].params}, environments[0].params)
self.assertDictEqual({"openml":150, "take":20, **environments[1].params}, environments[1].params)
def test_two_environments_two_filters(self):
json_txt = """{
"environments" : [
[{ "OpenmlSimulation": [150,151], "method":"foreach" }, { "Take":[10,20], "method":"foreach" }]
]
}"""
environments = EnvironmentDefinitionFileV1().filter(json.loads(json_txt))
self.assertEqual(4, len(environments))
self.assertIsInstance(environments[0], SimulatedEnvironment)
self.assertIsInstance(environments[1], SimulatedEnvironment)
self.assertIsInstance(environments[2], SimulatedEnvironment)
self.assertIsInstance(environments[3], SimulatedEnvironment)
self.assertDictEqual({"openml":150, "take":10, **environments[0].params}, environments[0].params)
self.assertDictEqual({"openml":150, "take":20, **environments[1].params}, environments[1].params)
self.assertDictEqual({"openml":151, "take":10, **environments[2].params}, environments[2].params)
self.assertDictEqual({"openml":151, "take":20, **environments[3].params}, environments[3].params)
def test_two_singular_environments(self):
json_txt = """{
"environments" : [
{"OpenmlSimulation": 150},
{"OpenmlSimulation": 151}
]
}"""
environments = EnvironmentDefinitionFileV1().filter(json.loads(json_txt))
self.assertIsInstance(environments[0], SimulatedEnvironment)
self.assertIsInstance(environments[1], SimulatedEnvironment)
self.assertDictEqual({"openml":150, **environments[0].params}, environments[0].params)
self.assertDictEqual({"openml":151, **environments[1].params}, environments[1].params)
def test_one_foreach_environment(self):
json_txt = """{
"environments" : [
{"OpenmlSimulation": [150,151], "method":"foreach"}
]
}"""
environments = EnvironmentDefinitionFileV1().filter(json.loads(json_txt))
self.assertIsInstance(environments[0], SimulatedEnvironment)
self.assertIsInstance(environments[1], SimulatedEnvironment)
self.assertDictEqual({"openml":150, **environments[0].params}, environments[0].params)
self.assertDictEqual({"openml":151, **environments[1].params}, environments[1].params)
def test_one_variable(self):
json_txt = """{
"variables" : {"$openml_sims": {"OpenmlSimulation": [150,151], "method":"foreach"} },
"environments" : [ "$openml_sims" ]
}"""
environments = EnvironmentDefinitionFileV1().filter(json.loads(json_txt))
self.assertIsInstance(environments[0], SimulatedEnvironment)
self.assertIsInstance(environments[1], SimulatedEnvironment)
self.assertDictEqual({"openml":150, **environments[0].params}, environments[0].params)
self.assertDictEqual({"openml":151, **environments[1].params}, environments[1].params)
def test_two_variables(self):
json_txt = """{
"variables": {
"$openmls": {"OpenmlSimulation": [150,151], "method":"foreach"},
"$takes" : {"Take":[10,20], "method":"foreach"}
},
"environments": [
["$openmls", "$takes"],
"$openmls"
]
}"""
environments = EnvironmentDefinitionFileV1().filter(json.loads(json_txt))
self.assertEqual(6, len(environments))
self.assertIsInstance(environments[0], SimulatedEnvironment)
self.assertIsInstance(environments[1], SimulatedEnvironment)
self.assertIsInstance(environments[2], SimulatedEnvironment)
self.assertIsInstance(environments[3], SimulatedEnvironment)
self.assertIsInstance(environments[4], SimulatedEnvironment)
self.assertIsInstance(environments[5], SimulatedEnvironment)
self.assertDictEqual({"openml":150, "take":10, **environments[0].params}, environments[0].params)
self.assertDictEqual({"openml":150, "take":20, **environments[1].params}, environments[1].params)
self.assertDictEqual({"openml":151, "take":10, **environments[2].params}, environments[2].params)
self.assertDictEqual({"openml":151, "take":20, **environments[3].params}, environments[3].params)
self.assertDictEqual({"openml":150 , **environments[4].params}, environments[4].params)
self.assertDictEqual({"openml":151 , **environments[5].params}, environments[5].params)
def test_pipe_list(self):
json_txt = """{
"environments" : [
[ {"OpenmlSimulation":150}, [ {"Take":10}, {"Take":20} ] ]
]
}"""
environments = EnvironmentDefinitionFileV1().filter(json.loads(json_txt))
self.assertEqual(2, len(environments))
self.assertIsInstance(environments[0], SimulatedEnvironment)
self.assertIsInstance(environments[1], SimulatedEnvironment)
self.assertDictEqual({"openml":150, "take":10, **environments[0].params}, environments[0].params)
self.assertDictEqual({"openml":150, "take":20, **environments[1].params}, environments[1].params)
def test_pipe_str(self):
json_txt = """{
"environments" : [
[ {"OpenmlSimulation":150}, "Identity" ]
]
}"""
environments = EnvironmentDefinitionFileV1().filter(json.loads(json_txt))
self.assertEqual(1, len(environments))
self.assertIsInstance(environments[0], SimulatedEnvironment)
self.assertDictEqual({"openml":150, **environments[0].params}, environments[0].params)
def test_bad_pipe_exception(self):
json_txt = """{
"environments" : [
[ {"OpenmlSimulation":150}, null ]
]
}"""
with self.assertRaises(CobaException) as e:
environments = EnvironmentDefinitionFileV1().filter(json.loads(json_txt))
self.assertIn("We were unable to construct",str(e.exception))
if __name__ == '__main__':
unittest.main()
| 43.191919
| 111
| 0.639383
| 748
| 8,552
| 7.219251
| 0.105615
| 0.079444
| 0.142222
| 0.082963
| 0.827963
| 0.814074
| 0.767407
| 0.76537
| 0.721296
| 0.672963
| 0
| 0.038444
| 0.215271
| 8,552
| 198
| 112
| 43.191919
| 0.766205
| 0
| 0
| 0.589744
| 0
| 0.012821
| 0.225652
| 0.008769
| 0
| 0
| 0
| 0
| 0.352564
| 1
| 0.083333
| false
| 0
| 0.051282
| 0
| 0.141026
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1771848f3d7f651cfe595073be8166543a5307fc
| 46
|
py
|
Python
|
fbone/coupon/__init__.py
|
edgarallang/dop-backend
|
c7c89b6145dfb895ab3dcb14172fa47afdbdf1be
|
[
"BSD-3-Clause"
] | 1
|
2015-12-14T17:53:34.000Z
|
2015-12-14T17:53:34.000Z
|
fbone/coupon/__init__.py
|
edgarallang/fbone
|
c7c89b6145dfb895ab3dcb14172fa47afdbdf1be
|
[
"BSD-3-Clause"
] | null | null | null |
fbone/coupon/__init__.py
|
edgarallang/fbone
|
c7c89b6145dfb895ab3dcb14172fa47afdbdf1be
|
[
"BSD-3-Clause"
] | null | null | null |
from .api import coupon
from .models import *
| 15.333333
| 23
| 0.76087
| 7
| 46
| 5
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 2
| 24
| 23
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
da4c72139af9e886002b3652d2e0a4030d5c6d45
| 33
|
py
|
Python
|
apps/blog/__init__.py
|
telesoho/pyblog
|
58fc500faeefc2559dac72a2878deacf2d7df769
|
[
"MIT"
] | null | null | null |
apps/blog/__init__.py
|
telesoho/pyblog
|
58fc500faeefc2559dac72a2878deacf2d7df769
|
[
"MIT"
] | null | null | null |
apps/blog/__init__.py
|
telesoho/pyblog
|
58fc500faeefc2559dac72a2878deacf2d7df769
|
[
"MIT"
] | null | null | null |
from .bp import app # noqa:F401
| 16.5
| 32
| 0.69697
| 6
| 33
| 3.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 0.212121
| 33
| 1
| 33
| 33
| 0.769231
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
da53cfd33b305c46933469b7dc26327b6674969b
| 79
|
py
|
Python
|
spacetimeformer/linear_model/__init__.py
|
Piki1989/spacetimeformer
|
7e0caf17dd03e5d25e2766c4f7132805779bcc40
|
[
"MIT"
] | 209
|
2021-09-28T13:59:56.000Z
|
2022-03-31T23:29:43.000Z
|
spacetimeformer/linear_model/__init__.py
|
Piki1989/spacetimeformer
|
7e0caf17dd03e5d25e2766c4f7132805779bcc40
|
[
"MIT"
] | 30
|
2021-09-30T07:53:38.000Z
|
2022-03-22T01:13:42.000Z
|
spacetimeformer/linear_model/__init__.py
|
Piki1989/spacetimeformer
|
7e0caf17dd03e5d25e2766c4f7132805779bcc40
|
[
"MIT"
] | 49
|
2021-10-29T22:47:20.000Z
|
2022-03-30T15:24:56.000Z
|
from .linear_ar import LinearModel
from .linear_model import Linear_Forecaster
| 26.333333
| 43
| 0.873418
| 11
| 79
| 6
| 0.636364
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101266
| 79
| 2
| 44
| 39.5
| 0.929577
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
da6da577d15b91b38044b528a32bd024a09f1d69
| 43
|
py
|
Python
|
manga109/__init__.py
|
km2/manga109
|
49940576280aa39105ef778465190655b78d1019
|
[
"MIT"
] | null | null | null |
manga109/__init__.py
|
km2/manga109
|
49940576280aa39105ef778465190655b78d1019
|
[
"MIT"
] | null | null | null |
manga109/__init__.py
|
km2/manga109
|
49940576280aa39105ef778465190655b78d1019
|
[
"MIT"
] | null | null | null |
from .client import Manga109 # noqa: F401
| 21.5
| 42
| 0.744186
| 6
| 43
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171429
| 0.186047
| 43
| 1
| 43
| 43
| 0.742857
| 0.232558
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
da70bfed2c85fda2bf5119c2db4f24b316e5049e
| 32
|
py
|
Python
|
test/files/legaluri/lagrum-basic.py
|
redhog/ferenda
|
6935e26fdc63adc68b8e852292456b8d9155b1f7
|
[
"BSD-2-Clause"
] | 18
|
2015-03-12T17:42:44.000Z
|
2021-12-27T10:32:22.000Z
|
test/files/legaluri/lagrum-basic.py
|
redhog/ferenda
|
6935e26fdc63adc68b8e852292456b8d9155b1f7
|
[
"BSD-2-Clause"
] | 13
|
2016-01-27T10:19:07.000Z
|
2021-12-13T20:24:36.000Z
|
test/files/legaluri/lagrum-basic.py
|
redhog/ferenda
|
6935e26fdc63adc68b8e852292456b8d9155b1f7
|
[
"BSD-2-Clause"
] | 6
|
2016-11-28T15:41:29.000Z
|
2022-01-08T11:16:48.000Z
|
{'law': '1998:204', 'type': 1}
| 16
| 31
| 0.46875
| 5
| 32
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.296296
| 0.15625
| 32
| 1
| 32
| 32
| 0.259259
| 0
| 0
| 0
| 0
| 0
| 0.483871
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
da7dcd1dbae2db51108e9d4c03500d827a66e520
| 1,916
|
py
|
Python
|
stocks/common.py
|
JackieMa000/problems
|
c521558830a0bbf67f94109af92d7be4397d0a43
|
[
"BSD-3-Clause"
] | null | null | null |
stocks/common.py
|
JackieMa000/problems
|
c521558830a0bbf67f94109af92d7be4397d0a43
|
[
"BSD-3-Clause"
] | 1
|
2020-10-23T04:06:56.000Z
|
2020-10-23T04:06:56.000Z
|
stocks/common.py
|
JackieMa000/problems
|
c521558830a0bbf67f94109af92d7be4397d0a43
|
[
"BSD-3-Clause"
] | null | null | null |
from typing import List
class Solution:
# 空间压缩,滚动数组
def maxProfit(self, k: int, prices: List[int]) -> int:
if not prices: return 0
n = len(prices)
# When K is larger than the prices.size, it goes to the same solution as what stocks.a122 does.
if k > n:
maxProfit = 0
for i in range(len(prices) - 1):
if prices[i + 1] > prices[i]:
maxProfit += (prices[i + 1] - prices[i])
return maxProfit
MP = [[[0, 0] for _ in range(k + 1)] for _ in range(2)]
# Initialize the data for the first day
MP[0] = [[0, 0]] + [[0, -prices[0]] for _ in range(k)]
for i in range(1, n):
x, y = i & 1, (i - 1) & 1
for kk in range(1, k + 1):
MP[x][kk][1] = max(MP[y][kk][1], MP[y][kk - 1][0] - prices[i])
MP[x][kk][0] = max(MP[y][kk][0], MP[y][kk][1] + prices[i])
return max(map(lambda x: x[0], MP[(n - 1) & 1]))
def maxProfit_1(self, k: int, prices: List[int]) -> int:
if not prices: return 0
n = len(prices)
# When K is larger thani the prices.size, it goes to the same solution as what stocks.a122 does.
if k > n:
maxProfit = 0
for i in range(len(prices) - 1):
if prices[i + 1] > prices[i]:
maxProfit += (prices[i + 1] - prices[i])
return maxProfit
MP = [[[0, 0] for _ in range(k + 1)] for _ in range(n)]
# Initialize the data for the first day
MP[0] = [[0, 0]] + [[0, -prices[0]] for _ in range(k)]
for i in range(1, n):
for kk in range(1, k + 1):
MP[i][kk][1] = max(MP[i - 1][kk][1], MP[i - 1][kk - 1][0] - prices[i])
MP[i][kk][0] = max(MP[i - 1][kk][0], MP[i - 1][kk][1] + prices[i])
return max(map(lambda x: x[0], MP[n - 1]))
| 36.846154
| 104
| 0.470772
| 313
| 1,916
| 2.859425
| 0.169329
| 0.093855
| 0.053631
| 0.049162
| 0.859218
| 0.82905
| 0.8
| 0.8
| 0.762011
| 0.762011
| 0
| 0.054098
| 0.363257
| 1,916
| 51
| 105
| 37.568627
| 0.679508
| 0.143006
| 0
| 0.628571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057143
| false
| 0
| 0.028571
| 0
| 0.228571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e53d3673d4e6ffe6730581f8c4647a8114fe6a13
| 220
|
py
|
Python
|
wikidump/processors/__init__.py
|
samuelebortolotti/wikidump
|
88b52bf7deadc10cf62c70ab3f37fd3a690be117
|
[
"MIT"
] | null | null | null |
wikidump/processors/__init__.py
|
samuelebortolotti/wikidump
|
88b52bf7deadc10cf62c70ab3f37fd3a690be117
|
[
"MIT"
] | null | null | null |
wikidump/processors/__init__.py
|
samuelebortolotti/wikidump
|
88b52bf7deadc10cf62c70ab3f37fd3a690be117
|
[
"MIT"
] | null | null | null |
from . import (
known_languages_extractor,
wikibreak_extractor,
user_warnings_templates,
user_warnings_extractor,
user_warnings_templates_tokens,
user_warnings_probabilistic_templates_extractor
)
| 24.444444
| 51
| 0.804545
| 22
| 220
| 7.409091
| 0.5
| 0.294479
| 0.257669
| 0.368098
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159091
| 220
| 8
| 52
| 27.5
| 0.881081
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.125
| 0
| 0.125
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e56647ab4cf860bf7f00aeebe003bcddd57cb6b6
| 77
|
py
|
Python
|
15th/codility/lesson03/FrogJmp/solution.py
|
WooJin1993/coding_test
|
ec9dc2dc768fe45700b4c0695b16535c0a824f6e
|
[
"MIT"
] | null | null | null |
15th/codility/lesson03/FrogJmp/solution.py
|
WooJin1993/coding_test
|
ec9dc2dc768fe45700b4c0695b16535c0a824f6e
|
[
"MIT"
] | null | null | null |
15th/codility/lesson03/FrogJmp/solution.py
|
WooJin1993/coding_test
|
ec9dc2dc768fe45700b4c0695b16535c0a824f6e
|
[
"MIT"
] | null | null | null |
from math import ceil
def solution(X, Y, D):
return ceil((Y-X) / D)
| 15.4
| 26
| 0.584416
| 14
| 77
| 3.214286
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 77
| 5
| 26
| 15.4
| 0.803571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
e5666f999d5f59d8414387d232fbd937e5660041
| 28
|
py
|
Python
|
stko/molecular/periodic/__init__.py
|
SFin94/stko
|
7a913c7f0c4b616ddc52fef7eeb44c539176c351
|
[
"MIT"
] | null | null | null |
stko/molecular/periodic/__init__.py
|
SFin94/stko
|
7a913c7f0c4b616ddc52fef7eeb44c539176c351
|
[
"MIT"
] | null | null | null |
stko/molecular/periodic/__init__.py
|
SFin94/stko
|
7a913c7f0c4b616ddc52fef7eeb44c539176c351
|
[
"MIT"
] | null | null | null |
from .cell import * # noqa
| 14
| 27
| 0.642857
| 4
| 28
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 28
| 1
| 28
| 28
| 0.857143
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e592ca16238ac08e0767f7af7547bc0bb16d3092
| 467
|
py
|
Python
|
spikeextractors/extractors/neoextractors/__init__.py
|
KnierimLab/spikeextractors
|
716b1a91bd81fc4d6fbc7e0aef0ed6cf53cf4790
|
[
"MIT"
] | null | null | null |
spikeextractors/extractors/neoextractors/__init__.py
|
KnierimLab/spikeextractors
|
716b1a91bd81fc4d6fbc7e0aef0ed6cf53cf4790
|
[
"MIT"
] | null | null | null |
spikeextractors/extractors/neoextractors/__init__.py
|
KnierimLab/spikeextractors
|
716b1a91bd81fc4d6fbc7e0aef0ed6cf53cf4790
|
[
"MIT"
] | null | null | null |
from .plexonextractor import PlexonRecordingExtractor, PlexonSortingExtractor
from .neuralynxextractor import NeuralynxRecordingExtractor, NeuralynxNrdRecordingExtractor, NeuralynxSortingExtractor
from .mcsrawrecordingextractor import MCSRawRecordingExtractor
from .blackrockextractor import BlackrockRecordingExtractor, BlackrockSortingExtractor
from .axonaextractor import AxonaRecordingExtractor
from .spikegadgetsextractor import SpikeGadgetsRecordingExtractor
| 66.714286
| 119
| 0.914347
| 28
| 467
| 15.25
| 0.607143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06424
| 467
| 6
| 120
| 77.833333
| 0.977117
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e5cb23149317b7bdd70638c3fc07f01628337670
| 375
|
py
|
Python
|
bigfish/detection/tests/test_spot_detection.py
|
4DNucleome/big-fish
|
5512b6e3274872793ef4365a6dc423c72add91f9
|
[
"BSD-3-Clause"
] | 17
|
2020-03-04T10:46:37.000Z
|
2022-03-10T13:15:16.000Z
|
bigfish/detection/tests/test_spot_detection.py
|
4DNucleome/big-fish
|
5512b6e3274872793ef4365a6dc423c72add91f9
|
[
"BSD-3-Clause"
] | 48
|
2020-03-16T13:39:44.000Z
|
2022-03-31T17:26:50.000Z
|
bigfish/detection/tests/test_spot_detection.py
|
4DNucleome/big-fish
|
5512b6e3274872793ef4365a6dc423c72add91f9
|
[
"BSD-3-Clause"
] | 15
|
2020-03-04T16:02:31.000Z
|
2022-02-17T14:11:15.000Z
|
# -*- coding: utf-8 -*-
# Author: Arthur Imbert <arthur.imbert.pro@gmail.com>
# License: BSD 3 clause
"""
Unitary tests for bigfish.detection.spot_detection module.
"""
# TODO test bigfish.detection.detect_spots
# TODO test bigfish.detection.local_maximum_detection
# TODO test bigfish.detection.spots_thresholding
# TODO test bigfish.detection.automated_threshold_setting
| 28.846154
| 58
| 0.786667
| 49
| 375
| 5.877551
| 0.612245
| 0.277778
| 0.208333
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00597
| 0.106667
| 375
| 12
| 59
| 31.25
| 0.853731
| 0.936
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.083333
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e5f0e3ce2aecf349029fb8379acce36acc02990f
| 29
|
py
|
Python
|
fanwood/FanwoodText_build.py
|
chemoelectric/sortsmill
|
90b97a9296582211a133970bb577013c9c86ed81
|
[
"MIT"
] | 1
|
2021-10-14T20:56:30.000Z
|
2021-10-14T20:56:30.000Z
|
fanwood/FanwoodText_build.py
|
chemoelectric/sortsmill
|
90b97a9296582211a133970bb577013c9c86ed81
|
[
"MIT"
] | null | null | null |
fanwood/FanwoodText_build.py
|
chemoelectric/sortsmill
|
90b97a9296582211a133970bb577013c9c86ed81
|
[
"MIT"
] | null | null | null |
from Fanwood_build import *
| 9.666667
| 27
| 0.793103
| 4
| 29
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 29
| 2
| 28
| 14.5
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e5f53d771e613e5cea040e622cf6defdc2980e62
| 204
|
py
|
Python
|
main/problemsets/tests/__init__.py
|
mahkhaled/class2go
|
b32cb441e8d96c257f70cb61274812ebeed2649d
|
[
"Apache-2.0"
] | 2
|
2015-10-31T23:12:52.000Z
|
2021-01-19T11:03:00.000Z
|
main/problemsets/tests/__init__.py
|
sunu/class2go
|
653b1edd01d390ad387dd788e0fc2d89445fbcab
|
[
"Apache-2.0"
] | null | null | null |
main/problemsets/tests/__init__.py
|
sunu/class2go
|
653b1edd01d390ad387dd788e0fc2d89445fbcab
|
[
"Apache-2.0"
] | null | null | null |
# Need to import because the filenames don't match nosetest loader
# (i.e. contain [Tt]est on a word boundary)
from problemsets.tests.views_advanced import *
from problemsets.tests.views_simple import *
| 34
| 66
| 0.789216
| 32
| 204
| 4.96875
| 0.8125
| 0.188679
| 0.251572
| 0.314465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137255
| 204
| 5
| 67
| 40.8
| 0.903409
| 0.519608
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f92902150305aa678c87a0f8a63a642bc6f5eab2
| 618
|
py
|
Python
|
pypiorg/data/__all_models.py
|
paulburnz314/flask_talkpython
|
65a13c0fc6ab37d13cc996172d7e120e346116a9
|
[
"MIT"
] | null | null | null |
pypiorg/data/__all_models.py
|
paulburnz314/flask_talkpython
|
65a13c0fc6ab37d13cc996172d7e120e346116a9
|
[
"MIT"
] | null | null | null |
pypiorg/data/__all_models.py
|
paulburnz314/flask_talkpython
|
65a13c0fc6ab37d13cc996172d7e120e346116a9
|
[
"MIT"
] | null | null | null |
# Add all your SQLAlchemy models here.
# This allows us to import just this file when
# we need to preload the models and ensure they
# are all loaded.
# noinspection PyUnresolvedReferences
import pypiorg.data.downloads
# noinspection PyUnresolvedReferences
from pypiorg import data
# noinspection PyUnresolvedReferences
import pypiorg.data.licenses
# noinspection PyUnresolvedReferences
import pypiorg.data.maintainers
# noinspection PyUnresolvedReferences
import pypiorg.data.package
# noinspection PyUnresolvedReferences
import pypiorg.data.releases
# noinspection PyUnresolvedReferences
import pypiorg.data.users
| 30.9
| 47
| 0.847896
| 69
| 618
| 7.594203
| 0.492754
| 0.454198
| 0.458015
| 0.538168
| 0.583969
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11165
| 618
| 19
| 48
| 32.526316
| 0.954463
| 0.639159
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
00993dbd98a4902fa105d6cd3f11866eb2a6d3c1
| 46
|
py
|
Python
|
items/tasks/__init__.py
|
lalanza808/xmrauctions
|
992f0e605e566610d03c6e388ce70dcfa58864b3
|
[
"MIT"
] | 3
|
2020-01-07T13:01:59.000Z
|
2020-11-25T01:27:53.000Z
|
items/tasks/__init__.py
|
lalanza808/xmrauctions
|
992f0e605e566610d03c6e388ce70dcfa58864b3
|
[
"MIT"
] | 6
|
2020-01-02T21:33:04.000Z
|
2022-03-12T00:10:40.000Z
|
items/tasks/__init__.py
|
lalanza808/xmrauctions
|
992f0e605e566610d03c6e388ce70dcfa58864b3
|
[
"MIT"
] | 2
|
2020-02-01T18:03:07.000Z
|
2020-07-22T18:47:22.000Z
|
from items.tasks import cleanup, notifications
| 46
| 46
| 0.869565
| 6
| 46
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 46
| 1
| 46
| 46
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dad4b53aac2155c30bb9127184ef7a2ce6bb504b
| 14,526
|
py
|
Python
|
tests/test_modules.py
|
KaiyuYue/torchshard
|
89e21def180bf6063ceb2e312a61631173abc7e7
|
[
"Apache-2.0"
] | 265
|
2021-04-27T12:06:45.000Z
|
2022-03-17T11:13:17.000Z
|
tests/test_modules.py
|
poodarchu/torchshard
|
667cfce9ed3e2170c7768d910a71aa07897857e7
|
[
"Apache-2.0"
] | 7
|
2021-05-24T06:54:44.000Z
|
2022-01-01T18:47:38.000Z
|
tests/test_modules.py
|
KaiyuYue/torchshard
|
89e21def180bf6063ceb2e312a61631173abc7e7
|
[
"Apache-2.0"
] | 11
|
2021-04-28T04:15:44.000Z
|
2022-01-26T04:29:30.000Z
|
from typing import Optional, List, Callable, Tuple
import torch
import random
import sys
import torch.nn.functional as F
import torch.nn.parallel as parallel
import torch.multiprocessing as mp
import torch.nn.parallel as paralle
import torch.distributed as dist
import unittest
import torchshard as ts
from testing import IdentityLayer, IdentityLayer2D, IdentityLayer3D
from testing import CausalSelfAttention, ParallelCausalSelfAttention
from testing import MLP, ParallelMLP
from testing import dist_worker, assertEqual, set_seed
from testing import loss_reduction_type, threshold
class TestLayers(unittest.TestCase):
    """Equivalence tests between torchshard parallel layers and their plain
    torch counterparts.

    Each ``run_test_*`` staticmethod runs inside one process per GPU (spawned
    via ``mp.spawn`` + ``dist_worker``) and checks three things in order:
    forward outputs, loss values, and backward gradients.
    """

    @staticmethod
    def _align_parameters(ref_model, par_model):
        """Copy ``ref_model`` weights into ``par_model``.

        Tensors that torchshard marks as parallel are scattered across ranks
        along the appropriate dimension before the copy; unmarked tensors are
        copied verbatim.
        """
        for (_, op), (_, pp) in zip(
            ref_model.named_parameters(), par_model.named_parameters()
        ):
            parallel_dim = ts.get_parallel_dim(pp)
            if parallel_dim is None:
                pp.data.copy_(op.data)
            elif parallel_dim == 0:
                # 2D weights are split column-wise; 1D tensors (bias) are
                # kept whole on every rank.
                if len(pp.shape) == 2:
                    pp.data.copy_(ts.distributed.scatter(op.data, dim=-1))
                else:
                    pp.data.copy_(op.data)
            elif parallel_dim in [1, -1]:
                pp.data.copy_(ts.distributed.scatter(op.data, dim=0))

    @staticmethod
    def _assert_gradients(ref_model, par_model):
        """Compare ``par_model`` gradients against ``ref_model`` gradients.

        Parallel tensors are reduced across ranks (and the reference gradient
        scattered the same way the weight was) before the comparison so both
        sides see the same shard.
        """
        for (_, op), (_, pp) in zip(
            ref_model.named_parameters(), par_model.named_parameters()
        ):
            parallel_dim = ts.get_parallel_dim(pp)
            if parallel_dim is None:
                assertEqual(pp.grad, op.grad, threshold=threshold)
            elif parallel_dim == 0:
                if len(pp.shape) == 2:
                    pp_grad = ts.distributed.reduce(pp.grad)
                    op_grad = ts.distributed.reduce(
                        # dim=1 is the last dim of a 2D weight, matching the
                        # dim=-1 scatter used when the weights were aligned.
                        ts.distributed.scatter(op.grad, dim=1)
                    )
                    assertEqual(pp_grad, op_grad, threshold=threshold)
                else:
                    assertEqual(pp.grad, op.grad, threshold=threshold)
            elif parallel_dim in [1, -1]:
                pp_grad = ts.distributed.reduce(pp.grad)
                op_grad = ts.distributed.reduce(
                    ts.distributed.scatter(op.grad, dim=0)
                )
                assertEqual(pp_grad, op_grad, threshold=threshold)

    @staticmethod
    def run_test_parallel_self_attention(local_rank: int) -> None:
        """Compare ParallelCausalSelfAttention against CausalSelfAttention."""
        seed = 1235
        batch_size = 10
        sequence_length = 12
        vocab_size = 12  # unused, kept for symmetry with the other tests
        hidden_size = 8
        num_att_heads_per_partition = 6
        hidden_size_per_att_head = 8
        dropout_prob = 0.0  # has to be zero, otherwise outputs diverge
        tensor_model_parallel_size = ts.distributed.get_group_size()
        world_size = ts.distributed.get_world_size()
        set_seed(seed + local_rank)
        num_att_heads = num_att_heads_per_partition * world_size
        hidden_size = hidden_size_per_att_head * num_att_heads
        attention_mask = torch.randn(batch_size, 1, 1, sequence_length).cuda(local_rank)
        x = torch.randn(batch_size, sequence_length, hidden_size).cuda(local_rank)
        y = torch.randint(10, (batch_size,)).cuda(local_rank)
        raw_model = ParallelCausalSelfAttention(hidden_size, num_att_heads, dropout_prob).cuda(local_rank)
        raw_model = parallel.DistributedDataParallel(raw_model, device_ids=[local_rank])
        ts.register_ddp_parameters_to_ignore(raw_model)
        ddp_model = parallel.DistributedDataParallel(
            CausalSelfAttention(hidden_size, num_att_heads, dropout_prob).cuda(local_rank),
            device_ids=[local_rank]
        )
        raw_criterion = ts.nn.ParallelCrossEntropyLoss(reduction=loss_reduction_type).cuda(local_rank)
        ddp_criterion = torch.nn.CrossEntropyLoss(reduction=loss_reduction_type).cuda(local_rank)
        # align weight & bias
        TestLayers._align_parameters(ddp_model, raw_model)
        # switch mode
        raw_model.train()
        ddp_model.train()
        # every rank evaluates the full (gathered) batch
        attention_mask = ts.distributed.gather(attention_mask, dim=0)
        x = ts.distributed.gather(x, dim=0)
        y = ts.distributed.gather(y, dim=0)
        y1 = raw_model(x, attention_mask)
        y2 = ddp_model(x, attention_mask)
        # 1st assert: forward outputs
        assertEqual(y1, y2, threshold=threshold)
        raw_loss = raw_criterion(y1.view(batch_size * tensor_model_parallel_size, -1), y)
        ddp_loss = ddp_criterion(y2.view(batch_size * tensor_model_parallel_size, -1), y)
        if loss_reduction_type == 'none':
            raw_loss = raw_loss.sum()
            ddp_loss = ddp_loss.sum()
        # 2nd assert: forward losses
        assertEqual(raw_loss, ddp_loss, threshold=threshold)
        # 3rd assert: backward gradients
        raw_loss.backward()
        ddp_loss.backward()
        TestLayers._assert_gradients(ddp_model, raw_model)

    @staticmethod
    def run_test_parallel_mlp(local_rank: int) -> None:
        """Compare an MLP converted with ParallelLinear against a plain MLP."""
        # settings
        seed = 12345
        batch_size = 10
        sequence_length = 12
        vocab_size = 12  # unused, kept for symmetry with the other tests
        hidden_size = 8
        dropout_prob = 0.0  # has to be zero, otherwise outputs diverge
        tensor_model_parallel_size = ts.distributed.get_group_size()
        world_size = ts.distributed.get_world_size()
        # test parallel_dim = None
        set_seed(seed + local_rank)
        loss_weight = torch.randn(batch_size, sequence_length, hidden_size).cuda(local_rank)
        attention_mask = torch.randn(batch_size, 1, 1, sequence_length).cuda(local_rank)
        input_data = torch.randn(batch_size, sequence_length, hidden_size).cuda(local_rank)
        # every rank must see identical inputs
        dist.broadcast(loss_weight, src=0)
        dist.broadcast(attention_mask, src=0)
        dist.broadcast(input_data, src=0)
        # build the plain and to-be-parallelised modules
        original_model = MLP(hidden_size, dropout_prob).cuda(local_rank)
        parallel_model = MLP(hidden_size, dropout_prob).cuda(local_rank)
        # convert the two nn.Linear() layers to ts.nn.ParallelLinear()
        # NOTE(review): this replaces submodules while iterating
        # named_modules(); it works for this two-Linear MLP layout
        # (mlp[0] and mlp[3]) but is fragile for other module shapes.
        cnt = 0
        for n, m in parallel_model.named_modules():
            if isinstance(m, torch.nn.Linear) and cnt == 0:  # first linear layer
                parallel_model.mlp[0] = ts.nn.ParallelLinear.convert_parallel_linear(m, dim=1)
                cnt += 1
                continue
            if isinstance(m, torch.nn.Linear) and cnt == 1:  # second linear layer
                parallel_model.mlp[2] = ts.nn.RegisterParallelDim(dim=-1)
                parallel_model.mlp[3] = ts.nn.ParallelLinear.convert_parallel_linear(m, dim=0)
        original_model = parallel.DistributedDataParallel(original_model, device_ids=[local_rank])
        parallel_model = parallel.DistributedDataParallel(parallel_model, device_ids=[local_rank])
        ts.register_ddp_parameters_to_ignore(parallel_model)
        # align weight & bias
        TestLayers._align_parameters(original_model, parallel_model)
        # switch mode
        original_model.train()
        parallel_model.train()
        # assert: weight and bias were scattered as expected
        for (on, op), (pn, pp) in zip(original_model.named_parameters(), parallel_model.named_parameters()):
            parallel_dim = ts.get_parallel_dim(pp)
            if parallel_dim is None:
                assertEqual(op, pp, threshold=threshold)
            elif parallel_dim == 0:
                if len(pp.shape) == 2:
                    assertEqual(pp, ts.distributed.scatter(op.data, dim=-1), threshold=threshold)
                else:
                    assertEqual(pp, op, threshold=threshold)
            elif parallel_dim in [1, -1]:
                assertEqual(pp, ts.distributed.scatter(op.data, dim=0), threshold=threshold)
        # 1st assert: forward outputs
        parallel_output = parallel_model(input_data)
        original_output = original_model(input_data)
        assertEqual(original_output, parallel_output, threshold=threshold)
        # 2nd assert: forward losses
        original_loss = torch.mul(original_output, loss_weight)
        parallel_loss = torch.mul(parallel_output, loss_weight)
        original_loss.sum().backward()
        parallel_loss.sum().backward()
        assertEqual(original_loss, parallel_loss, threshold=threshold)
        # 3rd assert: backward gradients
        TestLayers._assert_gradients(original_model, parallel_model)

    @staticmethod
    def run_test_parallel_transformer_block(local_rank: int) -> None:
        """Compare a parallel (attention + MLP) block with its plain twin."""
        seed = 123
        batch_size = 10
        sequence_length = 12
        vocab_size = 12  # unused, kept for symmetry with the other tests
        hidden_size = 8
        num_att_heads_per_partition = 6
        hidden_size_per_att_head = 8
        dropout_prob = 0.0  # has to be zero, otherwise outputs diverge
        tensor_model_parallel_size = ts.distributed.get_group_size()
        world_size = ts.distributed.get_world_size()
        set_seed(seed + local_rank)
        num_att_heads = num_att_heads_per_partition * world_size
        hidden_size = hidden_size_per_att_head * num_att_heads
        attention_mask = torch.randn(batch_size, 1, 1, sequence_length).cuda(local_rank)
        x = torch.randn(batch_size, sequence_length, hidden_size).cuda(local_rank)
        y = torch.randint(10, (batch_size,)).cuda(local_rank)
        raw_model = torch.nn.Sequential(
            ParallelCausalSelfAttention(hidden_size, num_att_heads, dropout_prob),
            ParallelMLP(hidden_size, dropout_prob)
        ).cuda(local_rank)
        raw_model = parallel.DistributedDataParallel(raw_model, device_ids=[local_rank])
        ts.register_ddp_parameters_to_ignore(raw_model)
        ddp_model = parallel.DistributedDataParallel(
            torch.nn.Sequential(
                CausalSelfAttention(hidden_size, num_att_heads, dropout_prob),
                MLP(hidden_size, dropout_prob)
            ).cuda(local_rank),
            device_ids=[local_rank]
        )
        raw_criterion = ts.nn.ParallelCrossEntropyLoss(reduction=loss_reduction_type).cuda(local_rank)
        ddp_criterion = torch.nn.CrossEntropyLoss(reduction=loss_reduction_type).cuda(local_rank)
        # align weight & bias
        TestLayers._align_parameters(ddp_model, raw_model)
        # switch mode
        raw_model.train()
        ddp_model.train()
        attention_mask = ts.distributed.gather(attention_mask, dim=0)
        x = ts.distributed.gather(x, dim=0)
        y = ts.distributed.gather(y, dim=0)
        # run the two sub-modules explicitly: the attention layer takes the
        # mask, the MLP does not
        y1 = raw_model.module[0](x, attention_mask)
        y1 = raw_model.module[1](y1)
        y2 = ddp_model.module[0](x, attention_mask)
        y2 = ddp_model.module[1](y2)
        # 1st assert: forward outputs
        assertEqual(y1, y2, threshold=threshold)
        raw_loss = raw_criterion(y1.view(batch_size * tensor_model_parallel_size, -1), y)
        ddp_loss = ddp_criterion(y2.view(batch_size * tensor_model_parallel_size, -1), y)
        if loss_reduction_type == 'none':
            raw_loss = raw_loss.sum()
            ddp_loss = ddp_loss.sum()
        # 2nd assert: forward losses
        assertEqual(raw_loss, ddp_loss, threshold=threshold)
        # 3rd assert: backward gradients
        raw_loss.backward()
        ddp_loss.backward()
        # FIX: the original checked parallel_dim in [1, -1] gradients here
        # without the ts.distributed.reduce step and without the explicit
        # threshold used by the two sibling tests; unified via the shared
        # helper for consistency.
        TestLayers._assert_gradients(ddp_model, raw_model)

    @unittest.skipIf(not torch.cuda.is_available(), 'CUDA is not available')
    def test_parallel_self_attention(self):
        """Spawn one worker per GPU to run the self-attention equivalence test."""
        ngpus = torch.cuda.device_count()
        mp.spawn(
            dist_worker,
            args=(self.run_test_parallel_self_attention, ngpus),
            nprocs=ngpus
        )
        ts.distributed.destroy_process_group()

    @unittest.skipIf(not torch.cuda.is_available(), 'CUDA is not available')
    def test_parallel_mlp(self):
        """Spawn one worker per GPU to run the MLP equivalence test."""
        ngpus = torch.cuda.device_count()
        mp.spawn(
            dist_worker,
            args=(self.run_test_parallel_mlp, ngpus),
            nprocs=ngpus
        )
        ts.distributed.destroy_process_group()

    @unittest.skipIf(not torch.cuda.is_available(), 'CUDA is not available')
    def test_parallel_transformer_block(self):
        """Spawn one worker per GPU to run the transformer-block equivalence test."""
        ngpus = torch.cuda.device_count()
        mp.spawn(
            dist_worker,
            args=(self.run_test_parallel_transformer_block, ngpus),
            nprocs=ngpus
        )
        ts.distributed.destroy_process_group()
if __name__ == '__main__':
    # Force deterministic cuDNN kernels (and disable adaptive kernel
    # benchmarking) so repeated test runs produce identical numerics.
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    unittest.main()
| 41.502857
| 108
| 0.634173
| 1,801
| 14,526
| 4.855636
| 0.104386
| 0.057976
| 0.028245
| 0.021955
| 0.819783
| 0.784791
| 0.775529
| 0.76821
| 0.718125
| 0.708519
| 0
| 0.014136
| 0.269517
| 14,526
| 349
| 109
| 41.621777
| 0.810008
| 0.041168
| 0
| 0.67037
| 0
| 0
| 0.005683
| 0
| 0
| 0
| 0
| 0
| 0.085185
| 1
| 0.022222
| false
| 0
| 0.059259
| 0
| 0.085185
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
976b2720655184b59743d8e3a18984b55d69a58d
| 10,052
|
py
|
Python
|
dockerManager/views.py
|
uzairAK/serverom-panel
|
3dcde05ad618e6bef280db7d3180f926fe2ab1db
|
[
"MIT"
] | null | null | null |
dockerManager/views.py
|
uzairAK/serverom-panel
|
3dcde05ad618e6bef280db7d3180f926fe2ab1db
|
[
"MIT"
] | null | null | null |
dockerManager/views.py
|
uzairAK/serverom-panel
|
3dcde05ad618e6bef280db7d3180f926fe2ab1db
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.shortcuts import render, redirect, HttpResponse
from loginSystem.models import Administrator
from loginSystem.views import loadLoginPage
from .container import ContainerManager
from .decorators import preDockerRun
from plogical.acl import ACLManager
import json
# Create your views here.
# This function checks if user has admin permissions
def dockerPermission(request, userID, context):
    """Return 0 when *userID* has admin rights, otherwise an error response.

    The error payload is JSON for POST requests and an HTML error page for
    everything else. ``context`` is accepted for call-site symmetry but is
    not used in the check itself.
    """
    acl = ACLManager.loadedACL(userID)
    if acl['admin'] == 1:
        return 0
    if request.method == "POST":
        return ACLManager.loadErrorJson()
    return ACLManager.loadError()
@preDockerRun
def loadDockerHome(request):
    """Render the Docker manager landing page for an admin user."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        admin = Administrator.objects.get(pk=user_id)
        return render(request, 'dockerManager/index.html', {"type": admin.type})
    except KeyError:
        # No 'userID' in session -> user is not authenticated.
        return redirect(loadLoginPage)
def installDocker(request):
    """Start Docker installation in a background worker and report status.

    NOTE(review): unlike the other views this one carries no @preDockerRun
    decorator -- presumably intentional, since Docker is not installed yet
    when this endpoint is called; confirm before adding it.
    """
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        installer = ContainerManager(user_id, 'submitInstallDocker')
        installer.start()
        return HttpResponse(json.dumps({'status': 1, 'error_message': 'None'}))
    except BaseException as msg:
        # Best-effort endpoint: any failure is reported in the JSON payload.
        return HttpResponse(json.dumps({'status': 0, 'error_message': str(msg)}))
@preDockerRun
def installImage(request):
    """Submit a Docker image installation request (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().submitInstallImage(user_id, json.loads(request.body))
    except KeyError:
        # No 'userID' in session -> user is not authenticated.
        return redirect(loadLoginPage)
@preDockerRun
def viewContainer(request, name):
    """Render the detail page for the container called *name* (admin only)."""
    try:
        # Inject the container name into the (normally immutable) GET dict
        # so downstream code can read it uniformly.
        if not request.GET._mutable:
            request.GET._mutable = True
        request.GET['name'] = name
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager(name).loadContainerHome(request, user_id)
    except KeyError:
        # No 'userID' in session -> user is not authenticated.
        return redirect(loadLoginPage)
@preDockerRun
def getTags(request):
    """Return the available tags for a Docker image (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().getTags(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def delContainer(request):
    """Submit deletion of a container (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().submitContainerDeletion(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def recreateContainer(request):
    """Recreate an existing container (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().recreateContainer(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def runContainer(request):
    """Render/handle the container creation form (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().createContainer(request, user_id)
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def listContainers(request):
    """List all containers (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().listContainers(request, user_id)
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def getContainerLogs(request):
    """Fetch the logs of a container (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().getContainerLogs(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def submitContainerCreation(request):
    """Submit a new container creation request (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().submitContainerCreation(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def getContainerList(request):
    """Return the container list payload (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().getContainerList(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def doContainerAction(request):
    """Perform an action (start/stop/...) on a container (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().doContainerAction(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def getContainerStatus(request):
    """Return the status of a container (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().getContainerStatus(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def exportContainer(request):
    """Export a container (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().exportContainer(request, user_id)
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def saveContainerSettings(request):
    """Persist updated settings for a container (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().saveContainerSettings(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def getContainerTop(request):
    """Return the process list (top) of a container (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().getContainerTop(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def assignContainer(request):
    """Assign a container to a user (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().assignContainer(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def searchImage(request):
    """Search the registry for a Docker image (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().searchImage(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def images(request):
    """Render the Docker images page (admin only)."""
    try:
        user_id = request.session['userID']
        # NOTE(review): this is the only view passing 'images' as the
        # permission context instead of 'loadDockerHome'; kept as-is.
        denied = dockerPermission(request, user_id, 'images')
        if denied:
            return denied
        return ContainerManager().images(request, user_id)
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def manageImages(request):
    """Render the image management page (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().manageImages(request, user_id)
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def getImageHistory(request):
    """Return the layer history of a Docker image (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().getImageHistory(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
@preDockerRun
def removeImage(request):
    """Remove a Docker image (admin only)."""
    try:
        user_id = request.session['userID']
        denied = dockerPermission(request, user_id, 'loadDockerHome')
        if denied:
            return denied
        return ContainerManager().removeImage(user_id, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)
| 28.475921
| 81
| 0.638878
| 900
| 10,052
| 7.122222
| 0.121111
| 0.060842
| 0.10858
| 0.093292
| 0.769891
| 0.759438
| 0.759438
| 0.750702
| 0.743526
| 0.721997
| 0
| 0.000687
| 0.275965
| 10,052
| 353
| 82
| 28.475921
| 0.880049
| 0.00955
| 0
| 0.741697
| 0
| 0
| 0.055662
| 0.002411
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088561
| false
| 0
| 0.02583
| 0
| 0.295203
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
977c5b0b23e610ff5f149caa6da53ccd18ac6e79
| 19
|
py
|
Python
|
ratings/cruds/__init__.py
|
Platzi-Master-C8/gethired-jobplacement-ratings-backend
|
afa5ae3a749f9fcab863832d7db0928711a3f4e0
|
[
"MIT"
] | 1
|
2021-12-12T07:22:16.000Z
|
2021-12-12T07:22:16.000Z
|
ratings/cruds/__init__.py
|
Platzi-Master-C8/gethired-jobplacement-ratings-backend
|
afa5ae3a749f9fcab863832d7db0928711a3f4e0
|
[
"MIT"
] | 57
|
2021-12-21T17:56:48.000Z
|
2022-03-06T21:17:39.000Z
|
ratings/cruds/__init__.py
|
Platzi-Master-C8/gethired-jobplacement-ratings-backend
|
afa5ae3a749f9fcab863832d7db0928711a3f4e0
|
[
"MIT"
] | 5
|
2021-12-04T21:09:51.000Z
|
2022-01-29T16:14:02.000Z
|
from . import crud
| 9.5
| 18
| 0.736842
| 3
| 19
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 19
| 1
| 19
| 19
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
97aa53cc580ee18a0a4d99194c3df0e139dcc2f4
| 46
|
py
|
Python
|
tests/inputs/misc/13-call-fail.py
|
helq/pytropos
|
497ed5902e6e4912249ca0a46b477f9bfa6ae80a
|
[
"MIT"
] | 4
|
2019-10-06T18:01:24.000Z
|
2020-07-03T05:27:35.000Z
|
tests/inputs/misc/13-call-fail.py
|
helq/pytropos
|
497ed5902e6e4912249ca0a46b477f9bfa6ae80a
|
[
"MIT"
] | 5
|
2021-06-07T15:50:04.000Z
|
2021-06-07T15:50:06.000Z
|
tests/inputs/misc/13-call-fail.py
|
helq/pytropos
|
497ed5902e6e4912249ca0a46b477f9bfa6ae80a
|
[
"MIT"
] | null | null | null |
(5)(3)  # intentionally invalid: calling an int literal (analyser fixture)
l = [a, 21, l]  # 'a' and 'l' are unbound on the right-hand side
l(3)  # intentionally invalid: calling a list
a(20, 21, *n, b=m)  # 'a', 'n' and 'm' are all unbound
| 9.2
| 18
| 0.369565
| 14
| 46
| 1.214286
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.257143
| 0.23913
| 46
| 4
| 19
| 11.5
| 0.228571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c1774664c2fc93004cdd625d36c6951f789f9a28
| 321
|
py
|
Python
|
seglibpython/seglib/multicuts/weight_modifier.py
|
DerThorsten/seglib
|
4655079e390e301dd93e53f5beed6c9737d6df9f
|
[
"MIT"
] | null | null | null |
seglibpython/seglib/multicuts/weight_modifier.py
|
DerThorsten/seglib
|
4655079e390e301dd93e53f5beed6c9737d6df9f
|
[
"MIT"
] | null | null | null |
seglibpython/seglib/multicuts/weight_modifier.py
|
DerThorsten/seglib
|
4655079e390e301dd93e53f5beed6c9737d6df9f
|
[
"MIT"
] | null | null | null |
######################################
# general weight sampling
######################################
def gaussPertubation(weight,weightStt,offsetStd=0.0,out=None,n=1):
    """Stub for Gaussian perturbation of multicut weights -- not implemented.

    Returns None without touching its arguments. (The original docstring
    opened with four quote characters, leaving a stray '"' inside the text;
    normalised here.)

    NOTE(review): parameter semantics are not visible from this stub --
    presumably `weight` holds the edge weights, `weightStt` per-weight
    statistics, `offsetStd`/`n` sampling parameters and `out` an optional
    output buffer; confirm against callers before documenting as fact.
    """
    pass
######################################
# general weight sampling
######################################
| 16.05
| 66
| 0.367601
| 21
| 321
| 5.619048
| 0.714286
| 0.220339
| 0.355932
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010345
| 0.096573
| 321
| 20
| 67
| 16.05
| 0.396552
| 0.221184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
c1b12edb01c4318df30f8ec90c76a881fb91acc9
| 48
|
py
|
Python
|
deep_utils/utils/decorators/__init__.py
|
pooya-mohammadi/deep_utils
|
b589d8ab0a8d63f3d3b90c3bc0d4b1b648b8be37
|
[
"MIT"
] | 36
|
2021-11-10T05:17:18.000Z
|
2022-03-27T18:25:10.000Z
|
deep_utils/utils/decorators/__init__.py
|
pooya-mohammadi/deep_utils
|
b589d8ab0a8d63f3d3b90c3bc0d4b1b648b8be37
|
[
"MIT"
] | 1
|
2021-12-03T07:07:18.000Z
|
2022-03-08T09:29:03.000Z
|
deep_utils/utils/decorators/__init__.py
|
pooya-mohammadi/deep_utils
|
b589d8ab0a8d63f3d3b90c3bc0d4b1b648b8be37
|
[
"MIT"
] | 4
|
2021-11-28T07:39:57.000Z
|
2022-03-30T05:46:10.000Z
|
from .main import get_func_time, get_method_time
| 48
| 48
| 0.875
| 9
| 48
| 4.222222
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 48
| 1
| 48
| 48
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c1c33105e0839cf4c5be0ea11a50c3d9a31cd16e
| 33
|
py
|
Python
|
libs/garden/garden.zbarcam/__init__.py
|
Zer0897/keepstock
|
d4bcde7665688827eca3ff0280af2a2fa4eb81d3
|
[
"MIT"
] | null | null | null |
libs/garden/garden.zbarcam/__init__.py
|
Zer0897/keepstock
|
d4bcde7665688827eca3ff0280af2a2fa4eb81d3
|
[
"MIT"
] | null | null | null |
libs/garden/garden.zbarcam/__init__.py
|
Zer0897/keepstock
|
d4bcde7665688827eca3ff0280af2a2fa4eb81d3
|
[
"MIT"
] | null | null | null |
from .zbarcam import ZBarCam
| 16.5
| 28
| 0.727273
| 4
| 33
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.242424
| 33
| 2
| 29
| 16.5
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c1c7e803ec10470810f8fc78b8d36658d161efd6
| 20
|
py
|
Python
|
ngm/__init__.py
|
calpoly-bioinf/knowledge_driven_modeling
|
dbe55d5bb07f7c5a1834a21fde8833f295e3ac96
|
[
"MIT"
] | null | null | null |
ngm/__init__.py
|
calpoly-bioinf/knowledge_driven_modeling
|
dbe55d5bb07f7c5a1834a21fde8833f295e3ac96
|
[
"MIT"
] | null | null | null |
ngm/__init__.py
|
calpoly-bioinf/knowledge_driven_modeling
|
dbe55d5bb07f7c5a1834a21fde8833f295e3ac96
|
[
"MIT"
] | null | null | null |
from . import base
| 6.666667
| 18
| 0.7
| 3
| 20
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 20
| 2
| 19
| 10
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c1fab8ac6b170dec1129b889192c1ff046d013e1
| 44,775
|
py
|
Python
|
tests/features/steps/bgp_tests.py
|
Netests/netests
|
1a48bda461761c4ec854d6fa0c38629049009a4a
|
[
"MIT"
] | 14
|
2020-06-08T07:34:59.000Z
|
2022-03-14T08:52:03.000Z
|
tests/features/steps/bgp_tests.py
|
Netests/netests
|
1a48bda461761c4ec854d6fa0c38629049009a4a
|
[
"MIT"
] | null | null | null |
tests/features/steps/bgp_tests.py
|
Netests/netests
|
1a48bda461761c4ec854d6fa0c38629049009a4a
|
[
"MIT"
] | 3
|
2020-06-19T03:57:05.000Z
|
2020-06-22T22:46:42.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import yaml
import textfsm
from netests.comparators.bgp_compare import _compare_bgp
from netests.mappings import get_bgp_state_brief, get_bgp_peer_uptime
from netests.converters.bgp.arista.api import _arista_bgp_api_converter
from netests.converters.bgp.arista.ssh import _arista_bgp_ssh_converter
from netests.converters.bgp.cumulus.api import _cumulus_bgp_api_converter
from netests.converters.bgp.cumulus.ssh import _cumulus_bgp_ssh_converter
from netests.converters.bgp.extreme_vsp.ssh import _extreme_vsp_bgp_ssh_converter
from netests.converters.bgp.ios.api import _ios_bgp_api_converter
from netests.converters.bgp.ios.nc import _ios_bgp_nc_converter
from netests.converters.bgp.ios.ssh import _ios_bgp_ssh_converter
from netests.converters.bgp.iosxr.nc import _iosxr_bgp_nc_converter
from netests.converters.bgp.iosxr.ssh import _iosxr_bgp_ssh_converter
from netests.converters.bgp.juniper.api import _juniper_bgp_api_converter
from netests.converters.bgp.juniper.nc import _juniper_bgp_nc_converter
from netests.converters.bgp.juniper.ssh import _juniper_bgp_ssh_converter
from netests.converters.bgp.napalm.converter import _napalm_bgp_converter
from netests.converters.bgp.nxos.api import _nxos_bgp_api_converter
from netests.converters.bgp.nxos.api import _nxos_bgp_api_converter
from netests.converters.bgp.nxos.ssh import _nxos_bgp_ssh_converter
from netests.constants import (
NOT_SET,
FEATURES_SRC_PATH,
BGP_SESSIONS_HOST_KEY,
BGP_UPTIME_FORMAT_MS
)
from netests.protocols.bgp import (
BGPSession,
ListBGPSessions,
BGPSessionsVRF,
ListBGPSessionsVRF,
BGP
)
from netests.tools.file import (
open_file,
open_txt_file,
open_json_file,
open_txt_file_as_bytes
)
from behave import given, when, then
@given(u'A network protocols named BGP defined in netests/protocols/bgp.py')
def step_impl(context):
    # Initialise the scenario-wide list of not-implemented test markers.
    context.test_not_implemented = []
@given(u'I create a BGP object equals to Arista manually named o0001')
def step_impl(context):
    """Build the hand-crafted Arista reference BGP object (two VRFs)."""
    def idle_session(peer_ip, remote_as, state_time):
        # Every o0001 session is Idle with zero received prefixes.
        return BGPSession(
            src_hostname="leaf03",
            peer_ip=peer_ip,
            peer_hostname=NOT_SET,
            remote_as=remote_as,
            state_brief=get_bgp_state_brief("Idle"),
            session_state="Idle",
            state_time=state_time,
            prefix_received=0
        )

    wejob_sessions = ListBGPSessions(list())
    wejob_sessions.bgp_sessions.append(
        idle_session("100.100.100.100", "100", 1588518931.27118)
    )
    netests_sessions = ListBGPSessions(list())
    netests_sessions.bgp_sessions.append(
        idle_session("11.11.11.11", "11", 1588518176.788854)
    )
    netests_sessions.bgp_sessions.append(
        idle_session("12.12.12.12", "12", 1588518913.789179)
    )
    vrf_list = ListBGPSessionsVRF(list())
    vrf_list.bgp_sessions_vrf.append(
        BGPSessionsVRF(
            vrf_name="CUSTOMER_WEJOB",
            as_number="1111",
            router_id="1.2.3.4",
            bgp_sessions=wejob_sessions
        )
    )
    vrf_list.bgp_sessions_vrf.append(
        BGPSessionsVRF(
            vrf_name="CUSTOMER_NETESTS",
            as_number="1111",
            router_id="66.66.66.66",
            bgp_sessions=netests_sessions
        )
    )
    context.o0001 = BGP(
        hostname="leaf03",
        bgp_sessions_vrf_lst=vrf_list
    )
@given(u'I create a BGP object from a Arista API output named o0002')
def step_impl(context):
    """Convert a recorded Arista eAPI BGP payload into a BGP object."""
    raw = open_json_file(
        path=(
            f"{FEATURES_SRC_PATH}outputs/bgp/arista/api/"
            "arista_api_get_bgp.json"
        )
    )
    context.o0002 = _arista_bgp_api_converter(
        hostname="leaf03",
        cmd_output=raw,
        options={}
    )
@given(u'I create a BGP object from a Arista Netconf named o0003')
def step_impl(context):
    # Arista Netconf retrieval is not implemented -> mark the scenario skipped.
    context.scenario.tags.append("own_skipped")
@given(u'I create a BGP object from a Arista SSH output named o0004')
def step_impl(context):
    """Assemble the per-VRF Arista SSH captures and convert them."""
    base = f"{FEATURES_SRC_PATH}outputs/bgp/arista/ssh/"
    captures = {
        'default': "arista_show_ip_bgp_summary_default.json",
        'CUSTOMER_WEJOB': "arista_show_ip_bgp_summary_one_peer.json",
        'CUSTOMER_NETESTS': "arista_show_ip_bgp_summary_many_peers.json",
    }
    # Insertion order of the dict matches the original assignment order.
    cmd_output = {
        vrf: open_json_file(path=base + filename)
        for vrf, filename in captures.items()
    }
    context.o0004 = _arista_bgp_ssh_converter(
        hostname="leaf03",
        cmd_output=cmd_output,
        options={}
    )
@given(u'I create a BGP object equals to Cumulus manually named o0101')
def step_impl(context):
    """Build the hand-crafted Cumulus reference BGP object (two VRFs)."""
    def connect_session(peer_ip, remote_as):
        # Both o0101 sessions are in Connect state with a 0 ms uptime.
        return BGPSession(
            src_hostname="leaf01",
            peer_ip=peer_ip,
            peer_hostname=NOT_SET,
            remote_as=remote_as,
            state_brief=get_bgp_state_brief("Connect"),
            session_state="Connect",
            state_time=get_bgp_peer_uptime(
                value=0,
                format=BGP_UPTIME_FORMAT_MS
            ),
            prefix_received=NOT_SET
        )

    default_sessions = ListBGPSessions(list())
    default_sessions.bgp_sessions.append(connect_session("10.1.1.2", "65102"))
    xr_sessions = ListBGPSessions(list())
    xr_sessions.bgp_sessions.append(connect_session("10.1.2.2", "65203"))
    vrf_list = ListBGPSessionsVRF(list())
    vrf_list.bgp_sessions_vrf.append(
        BGPSessionsVRF(
            vrf_name="default",
            as_number="65101",
            router_id="1.1.1.1",
            bgp_sessions=default_sessions
        )
    )
    vrf_list.bgp_sessions_vrf.append(
        BGPSessionsVRF(
            vrf_name="IOS_XR_VRF",
            as_number="65201",
            router_id="10.10.10.10",
            bgp_sessions=xr_sessions
        )
    )
    context.o0101 = BGP(
        hostname="leaf01",
        bgp_sessions_vrf_lst=vrf_list
    )
@given(u'I create a BGP object from a Cumulus API output named o0102')
def step_impl(context):
cmd_output = dict()
cmd_output['default'] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/cumulus/api/"
"cumulus_api_get_vrf_default.json"
)
)
cmd_output['IOS_XR_VRF'] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/cumulus/api/"
"cumulus_api_get_vrf_xyz.json"
)
)
context.o0102 = _cumulus_bgp_api_converter(
hostname="leaf01",
cmd_output=cmd_output,
options={}
)
@given(u'I create a BGP object from a Cumulus Netconf named o0103')
def step_impl(context):
print("Cumulus BGP with Netconf not possible -> Not tested")
@given(u'I create a BGP object from a Cumulus SSH output named o0104')
def step_impl(context):
    # Load the captured Cumulus BGP VRF JSON taken over SSH.
    cmd_output = open_json_file(
        path=(
            f"{FEATURES_SRC_PATH}outputs/bgp/cumulus/ssh/"
            "cumulus_ssh_get_vrf.json"
        )
    )
    # NOTE(review): the *API* converter is reused here for SSH output —
    # presumably the SSH capture has the same JSON shape as the API
    # payload; confirm this is intentional and not a copy-paste slip.
    context.o0104 = _cumulus_bgp_api_converter(
        hostname="leaf01",
        cmd_output=cmd_output,
        options={}
    )
@given(u'I create a BGP object equals to Extreme VSP manually named o0201')
def step_impl(context):
bgp_sessions_vrf_lst = ListBGPSessionsVRF(
list()
)
bgp_sessions_lst = ListBGPSessions(
list()
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="spine02",
peer_ip="10.1.1.1",
peer_hostname=NOT_SET,
remote_as="65101",
state_brief=get_bgp_state_brief(
"Idle"
),
session_state="Idle",
state_time=get_bgp_peer_uptime(
value="10892000",
format=BGP_UPTIME_FORMAT_MS
),
prefix_received=NOT_SET
)
)
bgp_sessions_vrf_lst.bgp_sessions_vrf.append(
BGPSessionsVRF(
vrf_name="default",
as_number="65101",
router_id="2.2.2.2",
bgp_sessions=bgp_sessions_lst
)
)
bgp_sessions_lst = ListBGPSessions(
list()
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="spine02",
peer_ip="10.20.20.2",
peer_hostname=NOT_SET,
remote_as="65202",
state_brief=get_bgp_state_brief(
"Idle"
),
session_state="Idle",
state_time=get_bgp_peer_uptime(
value=0,
format=BGP_UPTIME_FORMAT_MS
),
prefix_received=NOT_SET
)
)
bgp_sessions_vrf_lst.bgp_sessions_vrf.append(
BGPSessionsVRF(
vrf_name="mgmt_vrf",
as_number="65101",
router_id="20.20.20.20",
bgp_sessions=bgp_sessions_lst
)
)
context.o0201 = BGP(
hostname="spine02",
bgp_sessions_vrf_lst=bgp_sessions_vrf_lst
)
@given(u'I create a BGP object from a Extreme VSP API output named o0202')
def step_impl(context):
    # Extreme VSP BGP retrieval via API is not supported, so this
    # scenario is documented as untested rather than implemented.
    # Fixed the skip message: it previously said "Netconf" (copy-pasted
    # from the o0203 step) even though this step is about the API output.
    print("Extreme VSP BGP with API not possible -> Not tested")
@given(u'I create a BGP object from a Extreme VSP Netconf output named o0203')
def step_impl(context):
print("Extreme VSP BGP with Netconf not possible -> Not tested")
@given(u'I create a BGP object from a Extreme VSP SSH output named o0204')
def step_impl(context):
dict_output = dict()
dict_output['default'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/extreme_vsp/ssh/"
"extreme_vsp_show_ip_bgp_summary.txt"
)
)
dict_output['mgmt_vrf'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/extreme_vsp/ssh/"
"extreme_vsp_show_ip_bgp_summary_vrf.txt"
)
)
context.o0204 = _extreme_vsp_bgp_ssh_converter(
hostname="spine02",
cmd_output=dict_output,
options={}
)
@given(u'I create a BGP object equals to IOS manually named o0301')
def step_impl(context):
bgp_sessions_vrf_lst = ListBGPSessionsVRF(
list()
)
bgp_sessions_lst = ListBGPSessions(
list()
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="leaf05",
peer_ip="33.3.3.3",
peer_hostname=NOT_SET,
remote_as="3",
state_brief=get_bgp_state_brief(
"Idle"
),
session_state="Idle",
state_time=get_bgp_peer_uptime(
value=0,
format=BGP_UPTIME_FORMAT_MS
),
prefix_received=NOT_SET
)
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="leaf05",
peer_ip="33.33.33.33",
peer_hostname=NOT_SET,
remote_as="3",
state_brief=get_bgp_state_brief(
"Idle"
),
session_state="Idle",
state_time=get_bgp_peer_uptime(
value=0,
format=BGP_UPTIME_FORMAT_MS
),
prefix_received=NOT_SET
)
)
bgp_sessions_vrf_lst.bgp_sessions_vrf.append(
BGPSessionsVRF(
vrf_name="CUSTOMER_APPLE",
as_number="33333",
router_id="33.33.33.33",
bgp_sessions=bgp_sessions_lst
)
)
bgp_sessions_lst = ListBGPSessions(
list()
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="leaf05",
peer_ip="15.15.15.15",
peer_hostname=NOT_SET,
remote_as="15",
state_brief=get_bgp_state_brief(
"fsm-idle"
),
session_state="fsm-idle",
state_time=get_bgp_peer_uptime(
value=0,
format=BGP_UPTIME_FORMAT_MS
),
prefix_received=NOT_SET
)
)
bgp_sessions_vrf_lst.bgp_sessions_vrf.append(
BGPSessionsVRF(
vrf_name="CUSTOMER_NETESTS",
as_number="33333",
router_id="33.33.33.33",
bgp_sessions=bgp_sessions_lst
)
)
context.o0301 = BGP(
hostname="leaf05",
bgp_sessions_vrf_lst=bgp_sessions_vrf_lst
)
@given(u'I create a BGP object from a IOS API output named o0302')
def step_impl(context):
dict_output = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/ios/api/"
"ios_api_get_bgp.json"
)
)
context.o0302 = _ios_bgp_api_converter(
hostname="leaf05",
cmd_output=dict_output,
options={}
)
@given(u'I create a BGP object from a IOS Netconf named o0303')
def step_impl(context):
dict_output = open_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/ios/netconf/"
"ios_nc_get_bgp.xml"
)
)
context.o0303 = _ios_bgp_nc_converter(
hostname="leaf05",
cmd_output=dict_output,
options={}
)
@given(u'I create a BGP object from a IOS SSH named o0304')
def step_impl(context):
dict_output = dict()
dict_output['CUSTOMER_APPLE'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/ios/ssh/"
"ios_ssh_get_bgp_vrf.txt"
)
)
dict_output['CUSTOMER_NETESTS'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/ios/ssh/"
"ios_ssh_get_bgp_vrf_2.txt"
)
)
context.o0304 = _ios_bgp_ssh_converter(
hostname="leaf05",
cmd_output=dict_output,
options={}
)
@given(u'I create a BGP object equals to IOS-XR manually named o0401')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a BGP object from a IOS-XR API output named o0402')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a BGP object from a IOS-XR Netconf output named o0403')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a BGP object from a IOS-XR SSH output named o0404')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a BGP object equals IOS-XR multi manually output named o0405')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a BGP object from a IOS-XR multi Netconf output named o0406')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a BGP object equals to IOS-XR no config manually named o0411')
def step_impl(context):
context.o0411 = BGP(
hostname="spine03",
bgp_sessions_vrf_lst=list()
)
@given(u'I create a BGP object from a IOS-XR no config API output named o0412')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a BGP object from a IOS-XR no config Netconf output named o0413')
def step_impl(context):
dict_output = open_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/iosxr/netconf/"
"iosxr_nc_get_bgp_no_config.xml"
)
)
context.o0413 = _iosxr_bgp_nc_converter(
hostname="spine03",
cmd_output=dict_output,
options={}
)
@given(u'I create a BGP object from a IOS-XR no config SSH output named o0414')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a BGP object equals to IOS-XR one vrf manually named o0421')
def step_impl(context):
bgp_sessions_vrf_lst = ListBGPSessionsVRF(
list()
)
bgp_sessions_lst = ListBGPSessions(
list()
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="spine03",
peer_ip="15.15.15.15",
peer_hostname=NOT_SET,
remote_as="1515",
state_brief="DOWN",
session_state="Active",
state_time=NOT_SET,
prefix_received=NOT_SET
)
)
bgp_sessions_vrf_lst.bgp_sessions_vrf.append(
BGPSessionsVRF(
vrf_name="CUSTOMER_NETESTS",
as_number="1515",
router_id="2.2.2.2",
bgp_sessions=bgp_sessions_lst
)
)
context.o0421 = BGP(
hostname="spine03",
bgp_sessions_vrf_lst=bgp_sessions_vrf_lst
)
@given(u'I create a BGP object from a IOS-XR one vrf config API output named o0422')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a BGP object from a IOS-XR one vrf config Netconf output named o0423')
def step_impl(context):
dict_output = open_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/iosxr/netconf/"
"iosxr_nc_get_bgp_one_vrf.xml"
)
)
context.o0423 = _iosxr_bgp_nc_converter(
hostname="spine03",
cmd_output=dict_output,
options={}
)
@given(u'I create a BGP object from a IOS-XR one vrf config SSH output named o0424')
def step_impl(context):
dict_output = dict()
dict_output['default'] = dict()
dict_output['CUSTOMER_NETESTS'] = dict()
dict_output['CUSTOMER_NETESTS']['peers'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/iosxr/ssh/"
"iosxr_cli_get_bgp_peers_vrf.txt"
)
)
dict_output['CUSTOMER_NETESTS']['rid'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/iosxr/ssh/"
"iosxr_cli_get_bgp_rid_vrf.txt"
)
)
dict_output['default']['peers'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/iosxr/ssh/"
"iosxr_cli_get_bgp_peers.txt"
)
)
dict_output['default']['rid'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/iosxr/ssh/"
"iosxr_cli_get_bgp_rid.txt"
)
)
context.o0424 = _iosxr_bgp_ssh_converter(
hostname="spine03",
cmd_output=dict_output,
options={}
)
@given(u'I create a BGP object equals to Juniper manually named o0501')
def step_impl(context):
bgp_sessions_vrf_lst = ListBGPSessionsVRF(
list()
)
bgp_sessions_lst = ListBGPSessions(
list()
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="leaf04",
peer_ip="10.1.1.1",
peer_hostname=NOT_SET,
remote_as="65333",
state_brief=get_bgp_state_brief(
"Idle"
),
session_state="Idle",
state_time=NOT_SET,
prefix_received=NOT_SET
)
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="leaf04",
peer_ip="10.2.2.2",
peer_hostname=NOT_SET,
remote_as="65333",
state_brief=get_bgp_state_brief(
"Idle"
),
session_state="Idle",
state_time=NOT_SET,
prefix_received=NOT_SET
)
)
bgp_sessions_vrf_lst.bgp_sessions_vrf.append(
BGPSessionsVRF(
vrf_name="CUSTOMER_AWS",
as_number="65444",
router_id="9.9.9.9",
bgp_sessions=bgp_sessions_lst
)
)
context.o0501 = BGP(
hostname="leaf04",
bgp_sessions_vrf_lst=bgp_sessions_vrf_lst
)
@given(u'I create a BGP object from a Juniper API output named o0502')
def step_impl(context):
dict_output = dict()
dict_output['default'] = dict()
dict_output['default']['bgp'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/juniper/api/"
"juniper_api_get_bgp_peers.xml"
)
)
dict_output['default']['rid'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/juniper/api/"
"juniper_api_get_bgp_rid.xml"
)
)
dict_output['CUSTOMER_AWS'] = dict()
dict_output['CUSTOMER_AWS']['bgp'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/juniper/api/"
"juniper_api_get_bgp_peers_vrf.xml"
)
)
dict_output['CUSTOMER_AWS']['rid'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/juniper/api/"
"juniper_api_get_bgp_rid_vrf.xml"
)
)
context.o0502 = _juniper_bgp_api_converter(
hostname="leaf04",
cmd_output=dict_output,
options={}
)
@given(u'I create a BGP object from a Juniper Netconf output named o0503')
def step_impl(context):
dict_output = dict()
dict_output['default'] = dict()
dict_output['default']['bgp'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/juniper/netconf/"
"juniper_nc_get_bgp_peers.xml"
)
)
dict_output['default']['rid'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/juniper/netconf/"
"juniper_nc_get_bgp_rid.xml"
)
)
dict_output['CUSTOMER_AWS'] = dict()
dict_output['CUSTOMER_AWS']['bgp'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/juniper/netconf/"
"juniper_nc_get_bgp_peers_vrf.xml"
)
)
dict_output['CUSTOMER_AWS']['rid'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/juniper/netconf/"
"juniper_nc_get_bgp_rid_vrf.xml"
)
)
context.o0503 = _juniper_bgp_nc_converter(
hostname="leaf04",
cmd_output=dict_output,
options={}
)
@given(u'I create a BGP object from a Juniper SSH output named o0504')
def step_impl(context):
dict_output = dict()
dict_output['default'] = dict()
dict_output['default']['bgp'] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/juniper/ssh/"
"juniper_cli_get_bgp_peers.json"
)
)
dict_output['default']['rid'] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/juniper/ssh/"
"juniper_cli_get_bgp_rid.json"
)
)
dict_output['CUSTOMER_AWS'] = dict()
dict_output['CUSTOMER_AWS']['bgp'] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/juniper/ssh/"
"juniper_cli_get_bgp_peers_vrf.json"
)
)
dict_output['CUSTOMER_AWS']['rid'] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/juniper/ssh/"
"juniper_cli_get_bgp_rid_vrf.json"
)
)
context.o0504 = _juniper_bgp_ssh_converter(
hostname="leaf04",
cmd_output=dict_output,
options={}
)
@given(u'I create a BGP object equals to NAPALM manually named o0601')
def step_impl(context):
print("NAPALM BGP doesn't retrieve ROUTER-ID -> Not tested")
@given(u'I create a BGP object from a NAPALM output named o0602')
def step_impl(context):
cmd_output = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/napalm/"
"napalm_get_bgp.json"
)
)
context.o0602 = _napalm_bgp_converter(
hostname="leaf04",
cmd_output=cmd_output,
options={}
)
@given(u'I create a BGP object equals to NXOS manually named o0701')
def step_impl(context):
bgp_sessions_vrf_lst = ListBGPSessionsVRF(
list()
)
bgp_sessions_lst = ListBGPSessions(
list()
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="leaf02",
peer_ip="172.16.0.2",
peer_hostname=NOT_SET,
remote_as="65535",
state_brief=get_bgp_state_brief(
"Idle"
),
session_state="Idle",
state_time=NOT_SET,
prefix_received=NOT_SET
)
)
bgp_sessions_vrf_lst.bgp_sessions_vrf.append(
BGPSessionsVRF(
vrf_name="default",
as_number="65535",
router_id="172.16.0.1",
bgp_sessions=bgp_sessions_lst
)
)
bgp_sessions_lst = ListBGPSessions(
list()
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="leaf02",
peer_ip="11.1.1.1",
peer_hostname=NOT_SET,
remote_as="1",
state_brief=get_bgp_state_brief(
"Idle"
),
session_state="Idle",
state_time=NOT_SET,
prefix_received=NOT_SET
)
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="leaf02",
peer_ip="22.2.2.2",
peer_hostname=NOT_SET,
remote_as="2",
state_brief=get_bgp_state_brief(
"Idle"
),
session_state="Idle",
state_time=NOT_SET,
prefix_received=NOT_SET
)
)
bgp_sessions_vrf_lst.bgp_sessions_vrf.append(
BGPSessionsVRF(
vrf_name="CUSTOMER_GOOGLE",
as_number="65535",
router_id="0.0.0.0",
bgp_sessions=bgp_sessions_lst
)
)
bgp_sessions_lst = ListBGPSessions(
list()
)
context.o0701 = BGP(
hostname="leaf02",
bgp_sessions_vrf_lst=bgp_sessions_vrf_lst
)
@given(u'I create a BGP object from a NXOS API output named o0702')
def step_impl(context):
dict_output = dict()
dict_output['default'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/nxos/api/"
"nxos_api_get_bgp_default.json"
)
)
dict_output['management'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/nxos/api/"
"nxos_api_get_bgp_vrf_mgmt.json"
)
)
dict_output['CUSTOMER_GOOGLE'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/nxos/api/"
"nxos_api_get_bgp_vrf_customer.json"
)
)
context.o0702 = _nxos_bgp_api_converter(
hostname="leaf02",
cmd_output=dict_output,
options={}
)
@given(u'I create a BGP object from a NXOS Netconf output named o0703')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a BGP object from a NXOS SSH output named o0704')
def step_impl(context):
dict_output = dict()
dict_output['default'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/nxos/ssh/"
"nxos_show_bgp_session_vrf_default.json"
)
)
dict_output['management'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/nxos/ssh/"
"nxos_show_bgp_session_vrf_mgmt.json"
)
)
dict_output['CUSTOMER_GOOGLE'] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/bgp/nxos/ssh/"
"nxos_show_bgp_session_vrf_customer.json"
)
)
context.o0704 = _nxos_bgp_ssh_converter(
hostname="leaf02",
cmd_output=dict_output,
options={}
)
@given(u'BGP o0001 should be equal to o0002')
def step_impl(context):
assert context.o0001 == context.o0002
@given(u'BGP o0001 should be equal to o0003')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0001 should be equal to o0004')
def step_impl(context):
assert context.o0001 == context.o0004
@given(u'BGP o0002 should be equal to o0003')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0002 should be equal to o0004')
def step_impl(context):
assert context.o0002 == context.o0004
@given(u'BGP o0003 should be equal to o0004')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP YAML file should be equal to o0002')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="leaf03",
groups=['eos'],
bgp_host_data=context.o0002,
test=True
)
@given(u'BGP YAML file should be equal to o0003')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP YAML file should be equal to o0004')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="leaf03",
groups=['eos'],
bgp_host_data=context.o0004,
test=True
)
@given(u'BGP o0101 should be equal to o0102')
def step_impl(context):
assert context.o0101 == context.o0102
@given(u'BGP o0101 should be equal to o0103')
def step_impl(context):
print("Cumulus BGP with Netconf not possible -> Not tested")
@given(u'BGP o0101 should be equal to o0102')
def step_impl(context):
    # Compare the manually-built Cumulus BGP object (o0101) against the
    # SSH-derived one (o0104).
    # Bug fix: this step previously asserted o0101 == o0102, which
    # duplicated the o0102 check above and left o0104 entirely untested
    # by this scenario.
    assert context.o0101 == context.o0104
@given(u'BGP o0102 should be equal to o0103')
def step_impl(context):
print("Cumulus BGP with Netconf not possible -> Not tested")
@given(u'BGP o0102 should be equal to o0104')
def step_impl(context):
assert context.o0102 == context.o0104
@given(u'BGP o0103 should be equal to o0104')
def step_impl(context):
print("Cumulus BGP with Netconf not possible -> Not tested")
@given(u'BGP YAML file should be equal to o0102')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="leaf01",
groups=['linux'],
bgp_host_data=context.o0102,
test=True
)
@given(u'BGP YAML file should be equal to o0103')
def step_impl(context):
print("Cumulus BGP with Netconf not possible -> Not tested")
@given(u'BGP YAML file should be equal to o0104')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="leaf01",
groups=['linux'],
bgp_host_data=context.o0104,
test=True
)
@given(u'BGP o0201 should be equal to o0202')
def step_impl(context):
print("Extreme VSP BGP with Netconf not possible -> Not tested")
@given(u'BGP o0201 should be equal to o0203')
def step_impl(context):
print("Extreme VSP BGP with Netconf not possible -> Not tested")
@given(u'BGP o0201 should be equal to o0204')
def step_impl(context):
assert context.o0201 == context.o0204
@given(u'BGP o0202 should be equal to o0203')
def step_impl(context):
print("Extreme VSP BGP with Netconf not possible -> Not tested")
@given(u'BGP o0202 should be equal to o0204')
def step_impl(context):
print("Extreme VSP BGP with Netconf not possible -> Not tested")
@given(u'BGP o0203 should be equal to o0204')
def step_impl(context):
print("Extreme VSP BGP with Netconf not possible -> Not tested")
@given(u'BGP YAML file should be equal to o0202')
def step_impl(context):
print("Extreme VSP BGP with Netconf not possible -> Not tested")
@given(u'BGP YAML file should be equal to o0203')
def step_impl(context):
print("Extreme VSP BGP with Netconf not possible -> Not tested")
@given(u'BGP YAML file should be equal to o0204')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="spine02",
groups=['extreme_vsp'],
bgp_host_data=context.o0204,
test=True
)
@given(u'BGP o0301 should be equal to o0302')
def step_impl(context):
assert context.o0301 == context.o0302
@given(u'BGP o0301 should be equal to o0303')
def step_impl(context):
assert context.o0301 == context.o0303
@given(u'BGP o0301 should be equal to o0304')
def step_impl(context):
assert context.o0301 == context.o0304
@given(u'BGP o0302 should be equal to o0303')
def step_impl(context):
assert context.o0302 == context.o0303
@given(u'BGP o0302 should be equal to o0304')
def step_impl(context):
assert context.o0302 == context.o0304
@given(u'BGP o0303 should be equal to o0304')
def step_impl(context):
assert context.o0303 == context.o0304
@given(u'BGP YAML file should be equal to o0302')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="leaf05",
groups=['ios'],
bgp_host_data=context.o0302,
test=True
)
@given(u'BGP YAML file should be equal to o0303')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="leaf05",
groups=['ios'],
bgp_host_data=context.o0303,
test=True
)
@given(u'BGP YAML file should be equal to o0304')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="leaf05",
groups=['ios'],
bgp_host_data=context.o0304,
test=True
)
@given(u'BGP o0401 should be equal to o0402')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0401 should be equal to o0403')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0401 should be equal to o0404')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0402 should be equal to o0403')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0402 should be equal to o0404')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0403 should be equal to o0404')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0405 should be equal to o0406')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP YAML file should be equal to o0402')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP YAML file should be equal to o0403')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP YAML file should be equal to o0404')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0411 should be equal to o0412')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0411 should be equal to o0413')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0411 should be equal to o0414')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0412 should be equal to o0413')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0412 should be equal to o0414')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0413 should be equal to o0414')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0421 should be equal to o0422')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0421 should be equal to o0423')
def step_impl(context):
print("Cisco IOS-XR doesn't get STATE => Not tested")
#assert context.o0421 == context.o0423
@given(u'BGP o0421 should be equal to o0424')
def step_impl(context):
assert context.o0421 == context.o0424
@given(u'BGP o0422 should be equal to o0423')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0422 should be equal to o0424')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0423 should be equal to o0424')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0501 should be equal to o0502')
def step_impl(context):
assert context.o0501 == context.o0502
@given(u'BGP o0501 should be equal to o0503')
def step_impl(context):
assert context.o0501 == context.o0503
@given(u'BGP o0501 should be equal to o0504')
def step_impl(context):
assert context.o0501 == context.o0504
@given(u'BGP o0502 should be equal to o0503')
def step_impl(context):
assert context.o0502 == context.o0503
@given(u'BGP o0502 should be equal to o0504')
def step_impl(context):
assert context.o0502 == context.o0504
@given(u'BGP o0503 should be equal to o0504')
def step_impl(context):
assert context.o0503 == context.o0504
@given(u'BGP YAML file should be equal to o0502')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="leaf04",
groups=['junos'],
bgp_host_data=context.o0502,
test=True
)
@given(u'BGP YAML file should be equal to o0503')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="leaf04",
groups=['junos'],
bgp_host_data=context.o0503,
test=True
)
@given(u'BGP YAML file should be equal to o0504')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="leaf04",
groups=['junos'],
bgp_host_data=context.o0504,
test=True
)
@given(u'BGP o0601 should be equal to o0602')
def step_impl(context):
print("NAPALM BGP doesn't retrieve ROUTER-ID -> Not tested")
@given(u'BGP o0701 should be equal to o0702')
def step_impl(context):
assert context.o0701 == context.o0702
@given(u'BGP o0701 should be equal to o0703')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0701 should be equal to o0704')
def step_impl(context):
assert context.o0701 == context.o0704
@given(u'BGP o0702 should be equal to o0703')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP o0702 should be equal to o0704')
def step_impl(context):
assert context.o0702 == context.o0704
@given(u'BGP o0703 should be equal to o0704')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP YAML file should be equal to o0702')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="leaf02",
groups=['nxos'],
bgp_host_data=context.o0702,
test=True
)
@given(u'BGP YAML file should be equal to o0703')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'BGP YAML file should be equal to o0704')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a BGP object to test compare function named o9999')
def step_impl(context):
bgp_sessions_vrf_lst = ListBGPSessionsVRF(
list()
)
bgp_sessions_lst = ListBGPSessions(
list()
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="leaf04",
peer_ip="10.1.1.1",
peer_hostname=NOT_SET,
remote_as="65333",
state_brief=get_bgp_state_brief(
"Idle"
),
session_state="Idle",
state_time=NOT_SET,
prefix_received=NOT_SET
)
)
bgp_sessions_lst.bgp_sessions.append(
BGPSession(
src_hostname="leaf04",
peer_ip="10.2.2.2",
peer_hostname=NOT_SET,
remote_as="65333",
state_brief=get_bgp_state_brief(
"Idle"
),
session_state="Idle",
state_time="12:12",
prefix_received=123
)
)
bgp_sessions_vrf_lst.bgp_sessions_vrf.append(
BGPSessionsVRF(
vrf_name="CUSTOMER_AWS",
as_number="65444",
router_id="9.9.9.9",
bgp_sessions=bgp_sessions_lst
)
)
context.o9999 = BGP(
hostname="leaf04",
bgp_sessions_vrf_lst=bgp_sessions_vrf_lst
)
@given(u'I create a BGP object to test compare function with <session_state> named o9982')
def step_impl(context):
options = {
'compare': {
'session_state': True
}
}
context.o9982 = create_bgp_obj_for_compare(options)
@given(u'I create a BGP object to test compare equal to o9982 without <session_state> named o9983')
def step_impl(context):
options = {}
context.o9983 = create_bgp_obj_for_compare(options)
@given(u'I compare BGP o9982 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9982 != context.o9999
@given(u'I compare BGP o9983 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9983 == context.o9999
@given(u'I create a BGP object to test compare function with <state_time> named o9984')
def step_impl(context):
options = {
'compare': {
'state_time': True
}
}
context.o9984 = create_bgp_obj_for_compare(options)
@given(u'I create a BGP object to test compare equal to o9984 without <state_time> named o9985')
def step_impl(context):
options = {}
context.o9985 = create_bgp_obj_for_compare(options)
@given(u'I compare BGP o9984 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9984 != context.o9999
@given(u'I compare BGP o9985 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9985 == context.o9999
@given(u'I create a BGP object to test compare function with <prefix_received> named o9986')
def step_impl(context):
options = {
'compare': {
'prefix_received': True
}
}
context.o9986 = create_bgp_obj_for_compare(options)
@given(u'I create a BGP object to test compare equal to o9986 without <prefix_received> named o9987')
def step_impl(context):
options = {}
context.o9987 = create_bgp_obj_for_compare(options)
@given(u'I compare BGP o9986 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9986 != context.o9999
@given(u'I compare BGP o9987 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9987 == context.o9999
def create_bgp_obj_for_compare(options):
    """Build a fixed two-session BGP object used by the compare scenarios.

    The first session is a clean "Idle" peer; the second carries
    deliberately bogus state/time/prefix values so the scenarios can
    verify which fields the given compare *options* actually consider.
    """
    vrf_sessions = ListBGPSessionsVRF(list())
    sessions = ListBGPSessions(list())

    # Well-formed session: matches the first peer of the o9999 fixture.
    sessions.bgp_sessions.append(
        BGPSession(
            src_hostname="leaf04",
            peer_ip="10.1.1.1",
            peer_hostname=NOT_SET,
            remote_as="65333",
            state_brief=get_bgp_state_brief("Idle"),
            session_state="Idle",
            state_time=NOT_SET,
            prefix_received=NOT_SET,
            options=options,
        )
    )

    # Intentionally garbage values: equality with o9999 should only fail
    # when the corresponding compare option is enabled.
    sessions.bgp_sessions.append(
        BGPSession(
            src_hostname="leaf04",
            peer_ip="10.2.2.2",
            peer_hostname=NOT_SET,
            remote_as="65333",
            state_brief=get_bgp_state_brief("WRONG_STATE"),
            session_state="UNKNOW_STATE",
            state_time="DJEIOJDOWIEJIW",
            prefix_received="DJOEWDJEWODJEOWIDJ",
            options=options,
        )
    )

    vrf_sessions.bgp_sessions_vrf.append(
        BGPSessionsVRF(
            vrf_name="CUSTOMER_AWS",
            as_number="65444",
            router_id="9.9.9.9",
            bgp_sessions=sessions,
        )
    )

    return BGP(
        hostname="leaf04",
        bgp_sessions_vrf_lst=vrf_sessions,
    )
@given(u'I Finish my BGP tests and list tests not implemented')
def step_impl(context):
assert _compare_bgp(
host_keys=BGP_SESSIONS_HOST_KEY,
hostname="leaf02",
groups=['nxos'],
bgp_host_data=context.o0704,
test=True
)
| 26.307286
| 101
| 0.632205
| 5,857
| 44,775
| 4.567355
| 0.04405
| 0.062914
| 0.054278
| 0.088819
| 0.872678
| 0.847408
| 0.824418
| 0.796494
| 0.775223
| 0.764308
| 0
| 0.053065
| 0.26852
| 44,775
| 1,701
| 102
| 26.322751
| 0.763709
| 0.001787
| 0
| 0.576557
| 0
| 0
| 0.271704
| 0.060951
| 0
| 0
| 0
| 0
| 0.030769
| 1
| 0.097436
| false
| 0
| 0.019048
| 0
| 0.117216
| 0.012454
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e735f23db95fb8090cad4901489cc47f6ededa35
| 488
|
py
|
Python
|
python/1108_Defanging_an_IP_Address.py
|
dvlpsh/leetcode-1
|
f965328af72113ac8a5a9d6624868c1502be937b
|
[
"MIT"
] | 4,416
|
2016-03-30T15:02:26.000Z
|
2022-03-31T16:31:03.000Z
|
python/1108_Defanging_an_IP_Address.py
|
YinpuLi/leetcode-6
|
1371de2631d745efba39de41b51c3424e35da434
|
[
"MIT"
] | 20
|
2018-11-17T13:46:25.000Z
|
2022-03-13T05:37:06.000Z
|
python/1108_Defanging_an_IP_Address.py
|
YinpuLi/leetcode-6
|
1371de2631d745efba39de41b51c3424e35da434
|
[
"MIT"
] | 1,374
|
2017-05-26T15:44:30.000Z
|
2022-03-30T19:21:02.000Z
|
class Solution:
    def defangIPaddr(self, address: str) -> str:
        """Return *address* with every '.' replaced by '[.]' (LeetCode 1108).

        Splitting on '.' and re-joining with '[.]' is equivalent to
        address.replace('.', '[.]') for every input string.
        """
        return '[.]'.join(address.split('.'))
| 37.538462
| 67
| 0.528689
| 52
| 488
| 4.961538
| 0.365385
| 0.232558
| 0.294574
| 0.403101
| 0.596899
| 0.596899
| 0.348837
| 0.348837
| 0
| 0
| 0
| 0
| 0.276639
| 488
| 12
| 68
| 40.666667
| 0.730878
| 0.653689
| 0
| 0
| 0
| 0
| 0.025478
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
e7623d08cbe00353b4f7434b681dab91911f2731
| 5,697
|
py
|
Python
|
PrimeDiscriminatorConv.py
|
lihaifeng215/Prime-Prediction
|
b5e31a5637ec303630e51a8a75c7ef0f0aa80b84
|
[
"MIT"
] | null | null | null |
PrimeDiscriminatorConv.py
|
lihaifeng215/Prime-Prediction
|
b5e31a5637ec303630e51a8a75c7ef0f0aa80b84
|
[
"MIT"
] | null | null | null |
PrimeDiscriminatorConv.py
|
lihaifeng215/Prime-Prediction
|
b5e31a5637ec303630e51a8a75c7ef0f0aa80b84
|
[
"MIT"
] | null | null | null |
import numpy as np
import keras
from keras.models import Sequential
from keras.layers import Dense,BatchNormalization,Conv2D,Conv2DTranspose,Activation,Reshape,Flatten
from keras.utils.np_utils import to_categorical
import tensorflow as tf
import time

# Wall-clock timer for the final "use time" report.
start = time.time()
# Allow TF to fall back to CPU placement and grow GPU memory on demand.
tfconfig = tf.ConfigProto(allow_soft_placement=True)
tfconfig.gpu_options.allow_growth = True
# NOTE(review): the Session created here is discarded and never registered
# with Keras (e.g. via backend.set_session), so it may have no effect -- confirm.
tf.Session(config=tfconfig)

# super parameter:
# Manual learning-rate schedule: one full training pass per rate, largest first.
learningRate = [0.1,0.01,0.001,0.0001,0.00001,0.000001]
batchSize = 5000
epochs = 5000
activation = "elu"
loss = "categorical_crossentropy"

print("learningRate:",learningRate)
print("batchSize   :",batchSize)
print("epochs      :",epochs)
print("activation  :",activation)
print("loss        :",loss)

# load data
# Column 0 is the integer sample, column 1 its label; the last 10k rows are
# held out as the test split.
data = np.loadtxt("file/BalancePrimeData_1_million.txt",delimiter=',',dtype=int)
trainData_X = data[:-10000,0]
trainData_Y = data[:-10000,1]
testData_X = data[-10000:,0]
testData_Y = data[-10000:,1]
# One-hot encode the labels for categorical cross-entropy.
trainData_Y = to_categorical(trainData_Y)
testData_Y = to_categorical(testData_Y)

# build the model
# Scalars are reshaped to 1x1 "images", upsampled by two transposed convs,
# reduced back by three convs, then flattened to a 2-way softmax.
model = Sequential()
model.add(Reshape((1,1,1),input_shape=(1,)))
# model.add(BatchNormalization())
model.add(Conv2DTranspose(5,(3,3),activation=activation))
model.add(BatchNormalization())
model.add(Conv2DTranspose(5,(3,3),activation=activation))
model.add(BatchNormalization())
model.add(Conv2D(5,(3,3),activation=activation))
model.add(BatchNormalization())
model.add(Conv2D(5,(3,3),activation=activation))
model.add(BatchNormalization())
model.add(Conv2D(2,(1,1),activation=activation))
model.add(BatchNormalization())
model.add(Flatten())
model.add(Activation("softmax"))

for i in learningRate:
    print('learning rate for latitude is :', i)
    # compile the model
    model.compile(optimizer=keras.optimizers.Adam(lr=i), loss=loss)
    if i != 0.1:
        # load parameter
        # Resume from the weights checkpointed during the previous (larger)
        # learning-rate pass; skipped on the very first pass.
        model.load_weights('logConv/PrimeDiscriminatorConv.hdf5')
    print("model compiled!")
    # save model
    tensorboard = keras.callbacks.TensorBoard(log_dir='logConv', write_images=True, histogram_freq=0)
    logger = keras.callbacks.CSVLogger('logConv/log.csv', separator=',', append=False)
    # NOTE(review): earlystop is created but NOT passed to the fit() callbacks
    # list below, so early stopping never triggers -- confirm intent.
    earlystop = keras.callbacks.EarlyStopping(monitor='loss', patience=0, verbose=0, mode='auto')
    model_saver = keras.callbacks.ModelCheckpoint('logConv/PrimeDiscriminatorConv.hdf5', monitor='loss', verbose=2,
                                                  save_best_only=True,
                                                  save_weights_only=True, mode='auto', period=1)
    # training
    model.fit(trainData_X,trainData_Y,batch_size=batchSize,epochs=epochs,verbose=2,validation_data=[testData_X,testData_Y],callbacks=[tensorboard,logger,model_saver])

# testing
# NOTE(review): indentation was lost in this dump; evaluation is assumed to
# run once after the whole learning-rate schedule -- confirm against history.
output = model.predict(testData_X)
print("the output is:\n",output)
print("the result:",np.mean(np.abs(output-testData_Y)))
print("use time:%.2fmins" %((time.time()-start)/60))
# import numpy as np
# import keras
# from keras.models import Sequential
# from keras.layers import Dense,BatchNormalization,Conv2D,Conv2DTranspose,Activation,Reshape,Flatten
# from keras.utils.np_utils import to_categorical
# import tensorflow as tf
# import time
#
# start = time.time()
# tfconfig = tf.ConfigProto(allow_soft_placement=True)
# tfconfig.gpu_options.allow_growth = True
# tf.Session(config=tfconfig)
#
# # super parameter:
# learningRate = 0.1
# batchSize = 5000
# epochs = 500
# activation = "elu"
# loss = "categorical_crossentropy"
#
# print("learningRate:",learningRate)
# print("batchSize :",batchSize)
# print("epochs :",epochs)
# print("activation :",activation)
# print("loss :",loss)
#
# # 建立模型
# model = Sequential()
# model.add(Reshape((1,1,1),input_shape=(1,)))
# model.add(BatchNormalization())
# model.add(Conv2DTranspose(5,(3,3),activation=activation))
# model.add(BatchNormalization())
# model.add(Conv2DTranspose(5,(3,3),activation=activation))
# model.add(BatchNormalization())
# model.add(Conv2D(5,(3,3),activation=activation))
# model.add(BatchNormalization())
# model.add(Conv2D(5,(3,3),activation=activation))
# model.add(BatchNormalization())
# model.add(Conv2D(2,(1,1),activation=activation))
# model.add(BatchNormalization())
# model.add(Flatten())
# model.add(Activation("sigmoid"))
#
# # 编译模型
# model.compile(optimizer=keras.optimizers.Adam(lr=learningRate),loss=loss)
# print("model compiled!")
#
# # save model
# tensorboard = keras.callbacks.TensorBoard(log_dir='logConv', write_images=True, histogram_freq=0)
# logger = keras.callbacks.CSVLogger('logConv/log.csv', separator=',', append=False)
# earlystop = keras.callbacks.EarlyStopping(monitor='loss', patience=0, verbose=0, mode='auto')
# model_saver = keras.callbacks.ModelCheckpoint('logConv/PrimeDiscriminatorConv.hdf5', monitor='loss', verbose=2,
# save_best_only=True,
# save_weights_only=True, mode='auto', period=1)
#
#
# data = np.loadtxt("file/BalancePrimeData_1_million.txt",delimiter=',',dtype=int)
# trainData_X = data[:-10000,0]
# trainData_Y = data[:-10000,1]
# testData_X = data[-10000:,0]
# testData_Y = data[-10000:,1]
# trainData_Y = to_categorical(trainData_Y)
# testData_Y = to_categorical(testData_Y)
#
# model.fit(trainData_X,trainData_Y,batch_size=batchSize,epochs=epochs,verbose=2,callbacks=[tensorboard,logger,model_saver])
# output = model.predict(testData_X)
# print("the output is:",output)
# print("the result:",np.mean(np.abs(output-testData_Y)))
# print("use time:%.2fmins" %((time.time()-start)/60))
| 35.166667
| 167
| 0.693172
| 696
| 5,697
| 5.570402
| 0.199713
| 0.057777
| 0.080475
| 0.09595
| 0.92649
| 0.907918
| 0.907918
| 0.907918
| 0.884189
| 0.862007
| 0
| 0.035201
| 0.157276
| 5,697
| 162
| 168
| 35.166667
| 0.772339
| 0.45954
| 0
| 0.152542
| 0
| 0
| 0.117772
| 0.045487
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.118644
| 0
| 0.118644
| 0.169492
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e772ce12b4a17586b080bf7008c3e4831256c497
| 26
|
py
|
Python
|
xlwings/rest/__init__.py
|
kushal-kumaran/xlwings
|
36ea1ba91ecb1c37d36d87dfa7ed987c06bca142
|
[
"BSD-3-Clause"
] | 1,138
|
2015-01-02T23:04:18.000Z
|
2019-04-02T09:04:09.000Z
|
xlwings/rest/__init__.py
|
kushal-kumaran/xlwings
|
36ea1ba91ecb1c37d36d87dfa7ed987c06bca142
|
[
"BSD-3-Clause"
] | 872
|
2015-01-02T01:43:52.000Z
|
2019-04-02T20:30:10.000Z
|
xlwings/rest/__init__.py
|
kushal-kumaran/xlwings
|
36ea1ba91ecb1c37d36d87dfa7ed987c06bca142
|
[
"BSD-3-Clause"
] | 261
|
2015-01-13T17:34:07.000Z
|
2019-03-20T17:33:36.000Z
|
from .api import api, run
| 13
| 25
| 0.730769
| 5
| 26
| 3.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 26
| 1
| 26
| 26
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e7b9ef4c8e089aba82922c15317daff58d79fe0f
| 174
|
py
|
Python
|
crawler.py
|
n8wachT/BotListBot
|
457160498a90c8d0a63d5a9f7400227e35431b6d
|
[
"MIT"
] | null | null | null |
crawler.py
|
n8wachT/BotListBot
|
457160498a90c8d0a63d5a9f7400227e35431b6d
|
[
"MIT"
] | null | null | null |
crawler.py
|
n8wachT/BotListBot
|
457160498a90c8d0a63d5a9f7400227e35431b6d
|
[
"MIT"
] | null | null | null |
from model.botlist import BotList
if __name__ == '__main__':
    # Historical note: this used to build Channel("@botlist", ...) before the
    # Channel class was replaced by BotList.
    botlist = BotList("", "https://telegram.me/botlist")
| 29
| 60
| 0.655172
| 21
| 174
| 5.047619
| 0.571429
| 0.226415
| 0.377358
| 0.415094
| 0.54717
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149425
| 174
| 5
| 61
| 34.8
| 0.716216
| 0.310345
| 0
| 0
| 0
| 0
| 0.29661
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
e7c073e5eab5fdb1fa417cbe7b2f880baea0fdba
| 1,348
|
py
|
Python
|
src/WebApp/SUPERSTAR/models.py
|
abradle/ccf
|
2c10e86aa7c1a1d00881ce469a612e423d7d9bd1
|
[
"Apache-2.0"
] | 1
|
2021-06-03T23:46:47.000Z
|
2021-06-03T23:46:47.000Z
|
src/WebApp/SUPERSTAR/models.py
|
abradle/ccf
|
2c10e86aa7c1a1d00881ce469a612e423d7d9bd1
|
[
"Apache-2.0"
] | null | null | null |
src/WebApp/SUPERSTAR/models.py
|
abradle/ccf
|
2c10e86aa7c1a1d00881ce469a612e423d7d9bd1
|
[
"Apache-2.0"
] | 3
|
2016-04-16T16:30:25.000Z
|
2018-03-11T11:00:58.000Z
|
from django.db import models
from PLIFS.models import PlifProbeBit, PlifProbe
from IOhandle.models import Target
class PlifVis(models.Model):
    """Model to hold the JSON for a PLIF string"""
    # The target it relates to; unique=True makes this effectively one row
    # per Target.
    # NOTE(review): ForeignKey(unique=True) is deprecated in newer Django in
    # favour of OneToOneField -- confirm the project's Django version before
    # changing (it would require a migration).
    target_id = models.ForeignKey(Target, unique=True)
    # The JSON of the vals
    json_text = models.TextField()
class PlifVisGrid(models.Model):
    """Model to hold the JSON for a PLIF string"""
    # The target it relates to (many rows per target, one per grid spacing)
    target_id = models.ForeignKey(Target)
    # The JSON of the vals
    json_text = models.TextField()
    # The spacings of the grid
    grid_space = models.FloatField()

    class Meta:
        # At most one stored JSON per (grid spacing, target) pair.
        unique_together = ('grid_space', 'target_id', )
class PlifProbeScore(models.Model):
    """Model to hold a score for a PLIF probe"""
    # the score
    score = models.FloatField()
    # The item it links to; unique=True -> one score per probe.
    # NOTE(review): ForeignKey(unique=True) is deprecated in newer Django in
    # favour of OneToOneField -- confirm before migrating.
    plif_probe = models.ForeignKey(PlifProbe, unique=True)
class PlifProbeGridScoreNew(models.Model):
    """Model to hold a score for a PLIF probe - with different grid spacing"""
    # the score
    score = models.FloatField()
    # The item it links to (many scores per probe, one per grid spacing)
    plif_probe = models.ForeignKey(PlifProbe)
    # The grid spacing
    grid_space = models.FloatField()

    class Meta:
        # At most one score per (grid spacing, probe) pair.
        unique_together = ('grid_space', 'plif_probe', )
| 29.955556
| 79
| 0.664688
| 176
| 1,348
| 5.011364
| 0.267045
| 0.05102
| 0.072562
| 0.081633
| 0.70068
| 0.70068
| 0.70068
| 0.70068
| 0.70068
| 0.612245
| 0
| 0
| 0.247033
| 1,348
| 45
| 80
| 29.955556
| 0.868966
| 0.28635
| 0
| 0.380952
| 0
| 0
| 0.04387
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.904762
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
99ce686fa471766688f919adbca44e5eaf14ca67
| 42
|
py
|
Python
|
app_018/app.py
|
OmarElKhatibCS/DevOpsJourney
|
73765936ecbe9ea8d3def2c6197242bba18d3d29
|
[
"MIT"
] | null | null | null |
app_018/app.py
|
OmarElKhatibCS/DevOpsJourney
|
73765936ecbe9ea8d3def2c6197242bba18d3d29
|
[
"MIT"
] | null | null | null |
app_018/app.py
|
OmarElKhatibCS/DevOpsJourney
|
73765936ecbe9ea8d3def2c6197242bba18d3d29
|
[
"MIT"
] | 1
|
2021-06-16T14:02:15.000Z
|
2021-06-16T14:02:15.000Z
|
# Greet readers of the Dev.to post.
greeting = "Hello Dev.to Folks! this is Omar"
print(greeting)
| 21
| 41
| 0.714286
| 8
| 42
| 3.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 42
| 1
| 42
| 42
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
99e94c8a690b3cc216206e51114dbe1342f800a5
| 5,437
|
py
|
Python
|
tests/examples/minlplib/st_qpk3.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | 2
|
2021-07-03T13:19:10.000Z
|
2022-02-06T10:48:13.000Z
|
tests/examples/minlplib/st_qpk3.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | 1
|
2021-07-04T14:52:14.000Z
|
2021-07-15T10:17:11.000Z
|
tests/examples/minlplib/st_qpk3.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | null | null | null |
# NLP written by GAMS Convert at 04/21/18 13:54:25
#
# Equation counts
# Total E G L N X C B
# 23 1 0 22 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 12 12 0 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 254 243 11 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *

# Pyomo translation (via GAMS Convert) of MINLPLib problem st_qpk3:
# an indefinite QP in 11 nonnegative variables with 22 linear constraints.
model = m = ConcreteModel()

# Decision variables x1..x11, continuous, x >= 0, started at 0.
m.x1 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x2 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x3 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x4 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x5 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(0,None),initialize=0)

# Quadratic objective: -sum(x_i^2) plus 0.5*x_i*x_{i+1} cross terms written
# out symmetrically (0.5*a*b + 0.5*b*a), i.e. x'Qx with a tridiagonal Q.
m.obj = Objective(expr=0.5*m.x1*m.x2 - m.x1*m.x1 + 0.5*m.x2*m.x1 - m.x2*m.x2 + 0.5*m.x2*m.x3 + 0.5*m.x3*m.x2 - m.x3*m.x3
                       + 0.5*m.x3*m.x4 + 0.5*m.x4*m.x3 - m.x4*m.x4 + 0.5*m.x4*m.x5 + 0.5*m.x5*m.x4 - m.x5*m.x5 + 0.5*
                       m.x5*m.x6 + 0.5*m.x6*m.x5 - m.x6*m.x6 + 0.5*m.x6*m.x7 + 0.5*m.x7*m.x6 - m.x7*m.x7 + 0.5*m.x7*m.x8
                       + 0.5*m.x8*m.x7 - m.x8*m.x8 + 0.5*m.x8*m.x9 + 0.5*m.x9*m.x8 - m.x9*m.x9 + 0.5*m.x9*m.x10 + 0.5*
                       m.x10*m.x9 - m.x10*m.x10 + 0.5*m.x10*m.x11 + 0.5*m.x11*m.x10 - m.x11*m.x11, sense=minimize)

# c1..c11: -a_k'x <= 0 where row a_k is the cyclic shift (k, k+1, ..., 11, 1, ..., k-1).
m.c1 = Constraint(expr= - m.x1 - 2*m.x2 - 3*m.x3 - 4*m.x4 - 5*m.x5 - 6*m.x6 - 7*m.x7 - 8*m.x8 - 9*m.x9 - 10*m.x10
                        - 11*m.x11 <= 0)

m.c2 = Constraint(expr= - 2*m.x1 - 3*m.x2 - 4*m.x3 - 5*m.x4 - 6*m.x5 - 7*m.x6 - 8*m.x7 - 9*m.x8 - 10*m.x9 - 11*m.x10
                        - m.x11 <= 0)

m.c3 = Constraint(expr= - 3*m.x1 - 4*m.x2 - 5*m.x3 - 6*m.x4 - 7*m.x5 - 8*m.x6 - 9*m.x7 - 10*m.x8 - 11*m.x9 - m.x10
                        - 2*m.x11 <= 0)

m.c4 = Constraint(expr= - 4*m.x1 - 5*m.x2 - 6*m.x3 - 7*m.x4 - 8*m.x5 - 9*m.x6 - 10*m.x7 - 11*m.x8 - m.x9 - 2*m.x10
                        - 3*m.x11 <= 0)

m.c5 = Constraint(expr= - 5*m.x1 - 6*m.x2 - 7*m.x3 - 8*m.x4 - 9*m.x5 - 10*m.x6 - 11*m.x7 - m.x8 - 2*m.x9 - 3*m.x10
                        - 4*m.x11 <= 0)

m.c6 = Constraint(expr= - 6*m.x1 - 7*m.x2 - 8*m.x3 - 9*m.x4 - 10*m.x5 - 11*m.x6 - m.x7 - 2*m.x8 - 3*m.x9 - 4*m.x10
                        - 5*m.x11 <= 0)

m.c7 = Constraint(expr= - 7*m.x1 - 8*m.x2 - 9*m.x3 - 10*m.x4 - 11*m.x5 - m.x6 - 2*m.x7 - 3*m.x8 - 4*m.x9 - 5*m.x10
                        - 6*m.x11 <= 0)

m.c8 = Constraint(expr= - 8*m.x1 - 9*m.x2 - 10*m.x3 - 11*m.x4 - m.x5 - 2*m.x6 - 3*m.x7 - 4*m.x8 - 5*m.x9 - 6*m.x10
                        - 7*m.x11 <= 0)

m.c9 = Constraint(expr= - 9*m.x1 - 10*m.x2 - 11*m.x3 - m.x4 - 2*m.x5 - 3*m.x6 - 4*m.x7 - 5*m.x8 - 6*m.x9 - 7*m.x10
                        - 8*m.x11 <= 0)

m.c10 = Constraint(expr= - 10*m.x1 - 11*m.x2 - m.x3 - 2*m.x4 - 3*m.x5 - 4*m.x6 - 5*m.x7 - 6*m.x8 - 7*m.x9 - 8*m.x10
                         - 9*m.x11 <= 0)

m.c11 = Constraint(expr= - 11*m.x1 - m.x2 - 2*m.x3 - 3*m.x4 - 4*m.x5 - 5*m.x6 - 6*m.x7 - 7*m.x8 - 8*m.x9 - 9*m.x10
                         - 10*m.x11 <= 0)

# c12..c22: the same cyclic rows with positive sign, a_k'x <= 66.
m.c12 = Constraint(expr= m.x1 + 2*m.x2 + 3*m.x3 + 4*m.x4 + 5*m.x5 + 6*m.x6 + 7*m.x7 + 8*m.x8 + 9*m.x9 + 10*m.x10
                         + 11*m.x11 <= 66)

m.c13 = Constraint(expr= 2*m.x1 + 3*m.x2 + 4*m.x3 + 5*m.x4 + 6*m.x5 + 7*m.x6 + 8*m.x7 + 9*m.x8 + 10*m.x9 + 11*m.x10
                         + m.x11 <= 66)

m.c14 = Constraint(expr= 3*m.x1 + 4*m.x2 + 5*m.x3 + 6*m.x4 + 7*m.x5 + 8*m.x6 + 9*m.x7 + 10*m.x8 + 11*m.x9 + m.x10
                         + 2*m.x11 <= 66)

m.c15 = Constraint(expr= 4*m.x1 + 5*m.x2 + 6*m.x3 + 7*m.x4 + 8*m.x5 + 9*m.x6 + 10*m.x7 + 11*m.x8 + m.x9 + 2*m.x10
                         + 3*m.x11 <= 66)

m.c16 = Constraint(expr= 5*m.x1 + 6*m.x2 + 7*m.x3 + 8*m.x4 + 9*m.x5 + 10*m.x6 + 11*m.x7 + m.x8 + 2*m.x9 + 3*m.x10
                         + 4*m.x11 <= 66)

m.c17 = Constraint(expr= 6*m.x1 + 7*m.x2 + 8*m.x3 + 9*m.x4 + 10*m.x5 + 11*m.x6 + m.x7 + 2*m.x8 + 3*m.x9 + 4*m.x10
                         + 5*m.x11 <= 66)

m.c18 = Constraint(expr= 7*m.x1 + 8*m.x2 + 9*m.x3 + 10*m.x4 + 11*m.x5 + m.x6 + 2*m.x7 + 3*m.x8 + 4*m.x9 + 5*m.x10
                         + 6*m.x11 <= 66)

m.c19 = Constraint(expr= 8*m.x1 + 9*m.x2 + 10*m.x3 + 11*m.x4 + m.x5 + 2*m.x6 + 3*m.x7 + 4*m.x8 + 5*m.x9 + 6*m.x10
                         + 7*m.x11 <= 66)

m.c20 = Constraint(expr= 9*m.x1 + 10*m.x2 + 11*m.x3 + m.x4 + 2*m.x5 + 3*m.x6 + 4*m.x7 + 5*m.x8 + 6*m.x9 + 7*m.x10
                         + 8*m.x11 <= 66)

m.c21 = Constraint(expr= 10*m.x1 + 11*m.x2 + m.x3 + 2*m.x4 + 3*m.x5 + 4*m.x6 + 5*m.x7 + 6*m.x8 + 7*m.x9 + 8*m.x10
                         + 9*m.x11 <= 66)

m.c22 = Constraint(expr= 11*m.x1 + m.x2 + 2*m.x3 + 3*m.x4 + 4*m.x5 + 5*m.x6 + 6*m.x7 + 7*m.x8 + 8*m.x9 + 9*m.x10
                         + 10*m.x11 <= 66)
| 50.342593
| 120
| 0.447305
| 1,183
| 5,437
| 2.05579
| 0.096365
| 0.034539
| 0.024671
| 0.090461
| 0.796053
| 0.782895
| 0.780428
| 0.719161
| 0.719161
| 0.551809
| 0
| 0.227273
| 0.336399
| 5,437
| 107
| 121
| 50.813084
| 0.446785
| 0.124701
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016129
| 0
| 0.016129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8209b17df8923dd5042f33f413793fe66e3854a8
| 409
|
py
|
Python
|
utils/unit_conversion.py
|
char3176/Vision2020_code
|
610ea826bb82c28a79f77fc09f89f0ee1d6d0fe9
|
[
"Apache-2.0"
] | null | null | null |
utils/unit_conversion.py
|
char3176/Vision2020_code
|
610ea826bb82c28a79f77fc09f89f0ee1d6d0fe9
|
[
"Apache-2.0"
] | null | null | null |
utils/unit_conversion.py
|
char3176/Vision2020_code
|
610ea826bb82c28a79f77fc09f89f0ee1d6d0fe9
|
[
"Apache-2.0"
] | null | null | null |
def feet2inches(feet):
    """Convert feet to inches (12 inches per foot)."""
    inches = 12 * feet
    return inches
def inches2feet(inches):
    """Convert a length in inches to (whole_feet, remainder_inches).

    Bug fix: the original computed ``feet = inches / 12.0`` (fractional feet)
    and then ``feet % 12.0``, so e.g. 30 inches returned (2.5, 2.5) instead of
    the intended 2 feet 6 inches.  The feet/remainder split must floor-divide
    the *inches* by 12 and take the remainder from the *inches*.
    """
    feet = inches // 12
    remainder_inches = inches % 12
    return feet, remainder_inches
def inches2meters(inches):
    """Convert inches to meters (1 inch = 0.0254 m exactly).

    Bug fix: the original multiplied by 39.37007874, which is the number of
    inches *per meter* -- that converts meters to inches, the opposite
    direction (and contradicts meters2inches below, which divides by 0.0254).
    """
    return inches * 0.0254
def meters2inches(meters):
    """Convert meters to inches (1 inch = 0.0254 m)."""
    total_inches = meters / 0.0254
    return total_inches
def meters2feet(meters):
    """Convert meters to (feet, remainder_inches), going through inches."""
    total_inches = meters2inches(meters)
    # inches2feet already returns the (feet, remainder_inches) pair.
    return inches2feet(total_inches)
| 17.782609
| 46
| 0.740831
| 51
| 409
| 5.862745
| 0.313725
| 0.200669
| 0.190635
| 0.167224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088757
| 0.173594
| 409
| 22
| 47
| 18.590909
| 0.795858
| 0
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.357143
| false
| 0
| 0
| 0.214286
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
8217d1bad7874fe28547981c8b106f8460f9b328
| 145
|
py
|
Python
|
graph_generation/config_params/data_integrity_params.py
|
googleinterns/data-dependency-graph-analysis
|
5629f2e4cc3fd71c8976483b0c2b4bdbcc2a7643
|
[
"Apache-2.0"
] | 4
|
2020-10-03T01:41:19.000Z
|
2021-01-21T16:28:16.000Z
|
graph_generation/config_params/data_integrity_params.py
|
googleinterns/data-dependency-graph-analysis
|
5629f2e4cc3fd71c8976483b0c2b4bdbcc2a7643
|
[
"Apache-2.0"
] | 24
|
2020-08-06T16:01:14.000Z
|
2020-10-10T23:02:23.000Z
|
graph_generation/config_params/data_integrity_params.py
|
googleinterns/data-dependency-graph-analysis
|
5629f2e4cc3fd71c8976483b0c2b4bdbcc2a7643
|
[
"Apache-2.0"
] | null | null | null |
class DataIntegrityParams:
    """Container for data-integrity configuration used by graph generation."""
    def __init__(self, data_volatility_proba_map):
        # Presumably maps a data-volatility category to its probability --
        # TODO confirm against the config parser that builds this object.
        self.data_volatility_proba_map = data_volatility_proba_map
| 36.25
| 66
| 0.813793
| 18
| 145
| 5.833333
| 0.5
| 0.4
| 0.542857
| 0.628571
| 0.495238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 145
| 3
| 67
| 48.333333
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
823bfb499004c94918dd0b2c1ca17b3c1c0e507c
| 13,850
|
py
|
Python
|
tests/iaas_classic/export_all/test_export_all.py
|
ericmharris/gc3-query
|
0bf5226130aafbb1974aeb96d93ee1996833e87d
|
[
"MIT"
] | null | null | null |
tests/iaas_classic/export_all/test_export_all.py
|
ericmharris/gc3-query
|
0bf5226130aafbb1974aeb96d93ee1996833e87d
|
[
"MIT"
] | null | null | null |
tests/iaas_classic/export_all/test_export_all.py
|
ericmharris/gc3-query
|
0bf5226130aafbb1974aeb96d93ee1996833e87d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
gc3-query.test_export_all [9/11/2018 2:28 PM]
~~~~~~~~~~~~~~~~
<DESCR SHORT>
<DESCR>
"""
################################################################################
## Standard Library Imports
import sys, os
################################################################################
## Third-Party Imports
from dataclasses import dataclass
import pytest
################################################################################
## Project Imports
from gc3_query.lib import *
##################
from pathlib import Path
import pytest
import click
from mongoengine import connect
from gc3_query.lib import gc3_cfg
from gc3_query.lib.gc3_config import GC3Config
from gc3_query.lib.iaas_classic.models.sec_rule_model import SecRuleModel
from gc3_query.lib.iaas_classic.sec_rules import SecRules
from gc3_query.lib.base_collections import NestedOrderedDictAttrListBase
# fixme? from gc3_query.lib.open_api import API_SPECS_DIR
import json
from pathlib import Path
import pytest
from bravado_core.spec import Spec
from bravado.response import BravadoResponse, BravadoResponseMetadata
import mongoengine
from pymongo import MongoClient
from mongoengine.connection import get_connection, register_connection
from gc3_query.lib import *
from gc3_query.lib import gc3_cfg
from gc3_query.lib.export_delegates.mongodb import storage_adapter_init
# from gc3_query.lib.export_delegates.mongodb import storage_adapter_init
# # fixme? from gc3_query.lib.open_api import API_SPECS_DIR
from pathlib import Path
from gc3_query.lib import *
import pytest
# from pprint import pprint, pformat
# Module-level log functions supplied by the project's get_logging helper.
_debug, _info, _warning, _error, _critical = get_logging(name=__name__)

# Directory containing this test module.
TEST_BASE_DIR: Path = Path(__file__).parent
# config_dir = TEST_BASE_DIR.joinpath("config")
# The atoml config lives under the package base dir, not next to the tests.
config_dir = gc3_cfg.BASE_DIR.joinpath("etc/config")
# Per-test output artifacts are written next to this module.
output_dir = TEST_BASE_DIR.joinpath('output')
def test_setup():
    """Sanity-check the test directory layout, creating missing directories."""
    assert TEST_BASE_DIR.exists()
    # assert API_SPECS_DIR.exists()
    for directory in (config_dir, output_dir):
        if not directory.exists():
            directory.mkdir()
##################
from gc3_query.lib.iaas_classic.instances import Instances
from gc3_query.lib.iaas_classic.models.instance_model import InstanceModel
@pytest.fixture()
def setup_Instances():
    """Yield (service_cfg, idm_domains, iaas_services, mongodb_connection)
    with one Instances service built per active IDM domain."""
    service = 'Instances'
    # idm_domain = 'gc30003'
    gc3_config = GC3Config(atoml_config_dir=config_dir)
    # Initialise the MongoDB storage adapter from the iaas_classic config.
    mongodb_connection: MongoClient = storage_adapter_init(mongodb_config=gc3_config.iaas_classic.mongodb.as_dict())
    service_cfg = gc3_config.iaas_classic.services.compute[service]
    # Only IDM domains flagged active are exported.
    idm_domains = [idm_domain for idm_domain in gc3_config.idm.domains.values() if idm_domain.active]
    # idm_cfg = gc3_config.idm.domains[idm_domain]
    # iaas_service = Instances(service_cfg=service_cfg, idm_cfg=idm_cfg)
    iaas_services = [Instances(service_cfg=service_cfg, idm_cfg=idm_cfg) for idm_cfg in idm_domains]
    assert service==service_cfg.name
    yield service_cfg, idm_domains, iaas_services, mongodb_connection
def test_save_all_Instances(setup_Instances):
    """Dump every Instances service and persist each record as an InstanceModel."""
    service_cfg, idm_domains, iaas_services, mongodb_connection = setup_Instances
    # http_client: IaaSRequestsHTTPClient = IaaSRequestsHTTPClient(idm_cfg=idm_cfg)
    total_results = 0
    for iaas_service in iaas_services:
        try:
            service_response = iaas_service.dump()
        except Exception as e:
            # Best-effort single retry on any dump failure; a second failure
            # propagates and fails the test.
            _warning(f"Exception during iaas_service.dump() for {iaas_service.service_name} on IDM Domain {iaas_service.idm_cfg.name}\nRetrying ...")
            _warning(f"Exception: {e}")
            _warning(f"Retrying ...")
            service_response = iaas_service.dump()
        assert service_response.result
        results = service_response.result.result
        total_results = total_results + len(results)
        for result in results:
            # Persist each record to MongoDB via its mongoengine model.
            result_dict = result._as_dict()
            model = InstanceModel(**result_dict)
            saved = model.save()
    print(f"\nPRINT: {iaas_service.service_name} exported: {total_results}")
    # click.echo(click.style(f"\n{iaas_service.service_name} instances exported: {total_results}", fg="green"))
from gc3_query.lib.iaas_classic.sec_applications import SecApplications
from gc3_query.lib.iaas_classic.models.sec_applications_model import SecApplicationModel
@pytest.fixture()
def setup_SecApplications():
    """Yield (service_cfg, idm_domains, iaas_services, mongodb_connection)
    with one SecApplications service built per active IDM domain."""
    service = 'SecApplications'
    # idm_domain = 'gc30003'
    gc3_config = GC3Config(atoml_config_dir=config_dir)
    # Initialise the MongoDB storage adapter from the iaas_classic config.
    mongodb_connection: MongoClient = storage_adapter_init(mongodb_config=gc3_config.iaas_classic.mongodb.as_dict())
    service_cfg = gc3_config.iaas_classic.services.compute[service]
    # Only IDM domains flagged active are exported.
    idm_domains = [idm_domain for idm_domain in gc3_config.idm.domains.values() if idm_domain.active]
    # idm_cfg = gc3_config.idm.domains[idm_domain]
    # iaas_service = SecApplications(service_cfg=service_cfg, idm_cfg=idm_cfg)
    iaas_services = [SecApplications(service_cfg=service_cfg, idm_cfg=idm_cfg) for idm_cfg in idm_domains]
    assert service==service_cfg.name
    yield service_cfg, idm_domains, iaas_services, mongodb_connection
def test_save_all_SecApplications(setup_SecApplications):
    """Dump every SecApplications service and persist each record as a SecApplicationModel."""
    service_cfg, idm_domains, iaas_services, mongodb_connection = setup_SecApplications
    # http_client: IaaSRequestsHTTPClient = IaaSRequestsHTTPClient(idm_cfg=idm_cfg)
    total_results = 0
    for iaas_service in iaas_services:
        try:
            service_response = iaas_service.dump()
        except Exception as e:
            # Best-effort single retry on any dump failure.
            _warning(f"Exception during iaas_service.dump() for {iaas_service.service_name} on IDM Domain {iaas_service.idm_cfg.name}\nRetrying ...")
            _warning(f"Exception: {e}")
            _warning(f"Retrying ...")
            service_response = iaas_service.dump()
        assert service_response.result
        results = service_response.result.result
        total_results = total_results + len(results)
        for result in results:
            # Persist each record to MongoDB via its mongoengine model.
            result_dict = result._as_dict()
            model = SecApplicationModel(**result_dict)
            saved = model.save()
    print(f"\nPRINT: {iaas_service.service_name} exported: {total_results}")
from gc3_query.lib.iaas_classic.sec_ip_lists import SecIPLists
from gc3_query.lib.iaas_classic.models.sec_ip_lists_model import SecIPListsModel
@pytest.fixture()
def setup_SecIPLists():
    """Yield (service_cfg, idm_domains, iaas_services, mongodb_connection)
    with one SecIPLists service built per active IDM domain."""
    service = 'SecIPLists'
    # idm_domain = 'gc30003'
    gc3_config = GC3Config(atoml_config_dir=config_dir)
    # Initialise the MongoDB storage adapter from the iaas_classic config.
    mongodb_connection: MongoClient = storage_adapter_init(mongodb_config=gc3_config.iaas_classic.mongodb.as_dict())
    service_cfg = gc3_config.iaas_classic.services.compute[service]
    # Only IDM domains flagged active are exported.
    idm_domains = [idm_domain for idm_domain in gc3_config.idm.domains.values() if idm_domain.active]
    # idm_cfg = gc3_config.idm.domains[idm_domain]
    # iaas_service = SecIPLists(service_cfg=service_cfg, idm_cfg=idm_cfg)
    iaas_services = [SecIPLists(service_cfg=service_cfg, idm_cfg=idm_cfg) for idm_cfg in idm_domains]
    assert service==service_cfg.name
    yield service_cfg, idm_domains, iaas_services, mongodb_connection
def test_save_all_SecIPLists(setup_SecIPLists):
    """Dump every SecIPLists service and persist each record as a SecIPListsModel."""
    service_cfg, idm_domains, iaas_services, mongodb_connection = setup_SecIPLists
    # http_client: IaaSRequestsHTTPClient = IaaSRequestsHTTPClient(idm_cfg=idm_cfg)
    total_results = 0
    for iaas_service in iaas_services:
        try:
            service_response = iaas_service.dump()
        except Exception as e:
            # Best-effort single retry on any dump failure.
            _warning(f"Exception during iaas_service.dump() for {iaas_service.service_name} on IDM Domain {iaas_service.idm_cfg.name}\nRetrying ...")
            _warning(f"Exception: {e}")
            _warning(f"Retrying ...")
            service_response = iaas_service.dump()
        assert service_response.result
        results = service_response.result.result
        total_results = total_results + len(results)
        for result in results:
            # Persist each record to MongoDB via its mongoengine model.
            result_dict = result._as_dict()
            model = SecIPListsModel(**result_dict)
            saved = model.save()
    print(f"\nPRINT: {iaas_service.service_name} exported: {total_results}")
from gc3_query.lib.iaas_classic.sec_lists import SecLists
from gc3_query.lib.iaas_classic.models.sec_list_model import SecListModel
@pytest.fixture()
def setup_SecLists():
    """Yield (service_cfg, idm_domains, iaas_services, mongodb_connection)
    with one SecLists service built per active IDM domain."""
    service = 'SecLists'
    # idm_domain = 'gc30003'
    gc3_config = GC3Config(atoml_config_dir=config_dir)
    # Initialise the MongoDB storage adapter from the iaas_classic config.
    mongodb_connection: MongoClient = storage_adapter_init(mongodb_config=gc3_config.iaas_classic.mongodb.as_dict())
    service_cfg = gc3_config.iaas_classic.services.compute[service]
    # Only IDM domains flagged active are exported.
    idm_domains = [idm_domain for idm_domain in gc3_config.idm.domains.values() if idm_domain.active]
    # idm_cfg = gc3_config.idm.domains[idm_domain]
    # iaas_service = SecLists(service_cfg=service_cfg, idm_cfg=idm_cfg)
    iaas_services = [SecLists(service_cfg=service_cfg, idm_cfg=idm_cfg) for idm_cfg in idm_domains]
    assert service==service_cfg.name
    yield service_cfg, idm_domains, iaas_services, mongodb_connection
def test_save_all_SecLists(setup_SecLists):
    """Dump every SecLists service and persist each record as a SecListModel."""
    service_cfg, idm_domains, iaas_services, mongodb_connection = setup_SecLists
    # http_client: IaaSRequestsHTTPClient = IaaSRequestsHTTPClient(idm_cfg=idm_cfg)
    total_results = 0
    for iaas_service in iaas_services:
        try:
            service_response = iaas_service.dump()
        except Exception as e:
            # Best-effort single retry on any dump failure.
            _warning(f"Exception during iaas_service.dump() for {iaas_service.service_name} on IDM Domain {iaas_service.idm_cfg.name}\nRetrying ...")
            _warning(f"Exception: {e}")
            _warning(f"Retrying ...")
            service_response = iaas_service.dump()
        assert service_response.result
        results = service_response.result.result
        total_results = total_results + len(results)
        for result in results:
            # Persist each record to MongoDB via its mongoengine model.
            result_dict = result._as_dict()
            model = SecListModel(**result_dict)
            saved = model.save()
    print(f"\nPRINT: {iaas_service.service_name} exported: {total_results}")
@pytest.fixture()
def setup_SecRules():
    """Yield (service_cfg, idm_domains, iaas_services, mongodb_connection)
    with one SecRules service built per active IDM domain."""
    service = 'SecRules'
    # idm_domain = 'gc30003'
    gc3_config = GC3Config(atoml_config_dir=config_dir)
    # Initialise the MongoDB storage adapter from the iaas_classic config.
    mongodb_connection: MongoClient = storage_adapter_init(mongodb_config=gc3_config.iaas_classic.mongodb.as_dict())
    service_cfg = gc3_config.iaas_classic.services.compute[service]
    # Only IDM domains flagged active are exported.
    idm_domains = [idm_domain for idm_domain in gc3_config.idm.domains.values() if idm_domain.active]
    # idm_cfg = gc3_config.idm.domains[idm_domain]
    # iaas_service = SecRules(service_cfg=service_cfg, idm_cfg=idm_cfg)
    iaas_services = [SecRules(service_cfg=service_cfg, idm_cfg=idm_cfg) for idm_cfg in idm_domains]
    assert service == service_cfg.name
    yield service_cfg, idm_domains, iaas_services, mongodb_connection
def test_save_all_SecRules(setup_SecRules):
    """Dump every SecRules service and persist each record as a SecRuleModel."""
    service_cfg, idm_domains, iaas_services, mongodb_connection = setup_SecRules
    # http_client: IaaSRequestsHTTPClient = IaaSRequestsHTTPClient(idm_cfg=idm_cfg)
    total_results = 0
    for iaas_service in iaas_services:
        try:
            service_response = iaas_service.dump()
        except Exception as e:
            # Best-effort single retry on any dump failure.
            _warning(f"Exception during iaas_service.dump() for {iaas_service.service_name} on IDM Domain {iaas_service.idm_cfg.name}\nRetrying ...")
            _warning(f"Exception: {e}")
            _warning(f"Retrying ...")
            service_response = iaas_service.dump()
        assert service_response.result
        results = service_response.result.result
        total_results = total_results + len(results)
        for result in results:
            # Persist each record to MongoDB via its mongoengine model.
            result_dict = result._as_dict()
            model = SecRuleModel(**result_dict)
            saved = model.save()
    print(f"\nPRINT: {iaas_service.service_name} exported: {total_results}")
from gc3_query.lib.iaas_classic.ip_reservations import IPReservations
from gc3_query.lib.iaas_classic.models.ip_reservations_model import IPReservationModel
@pytest.fixture()
def setup_IPReservations():
    """Yield (service_cfg, idm_domains, iaas_services, mongodb_connection)
    with one IPReservations service built per active IDM domain."""
    service = 'IPReservations'
    # idm_domain = 'gc30003'
    gc3_config = GC3Config(atoml_config_dir=config_dir)
    # Initialise the MongoDB storage adapter from the iaas_classic config.
    mongodb_connection: MongoClient = storage_adapter_init(mongodb_config=gc3_config.iaas_classic.mongodb.as_dict())
    service_cfg = gc3_config.iaas_classic.services.compute[service]
    # Only IDM domains flagged active are exported.
    idm_domains = [idm_domain for idm_domain in gc3_config.idm.domains.values() if idm_domain.active]
    # idm_cfg = gc3_config.idm.domains[idm_domain]
    # iaas_service = IPReservations(service_cfg=service_cfg, idm_cfg=idm_cfg)
    iaas_services = [IPReservations(service_cfg=service_cfg, idm_cfg=idm_cfg) for idm_cfg in idm_domains]
    assert service == service_cfg.name
    yield service_cfg, idm_domains, iaas_services, mongodb_connection
def test_save_all_IPReservations(setup_IPReservations):
    """Dump IPReservations from each active IDM domain and persist every result as an IPReservationModel."""
    service_cfg, idm_domains, iaas_services, mongodb_connection = setup_IPReservations
    total_results = 0
    for iaas_service in iaas_services:
        try:
            service_response = iaas_service.dump()
        except Exception as e:
            # The IaaS API fails transiently; log the failure and retry once.
            # A second failure propagates and fails the test, as intended.
            _warning(f"Exception during iaas_service.dump() for {iaas_service.service_name} on IDM Domain {iaas_service.idm_cfg.name}\nRetrying ...")
            _warning(f"Exception: {e}")
            service_response = iaas_service.dump()
        assert service_response.result
        results = service_response.result.result
        total_results += len(results)
        for result in results:
            # _as_dict() is the service result's export hook; feed it straight to the model.
            IPReservationModel(**result._as_dict()).save()
        print(f"\nPRINT: {iaas_service.service_name} exported: {total_results}")
| 42.615385
| 149
| 0.727292
| 1,758
| 13,850
| 5.397042
| 0.087031
| 0.034148
| 0.028457
| 0.036362
| 0.802909
| 0.796058
| 0.781935
| 0.769077
| 0.754321
| 0.706682
| 0
| 0.009722
| 0.168231
| 13,850
| 324
| 150
| 42.746914
| 0.813889
| 0.134513
| 0
| 0.678733
| 0
| 0.027149
| 0.115902
| 0.046807
| 0
| 0
| 0
| 0.003086
| 0.058824
| 1
| 0.058824
| false
| 0
| 0.167421
| 0
| 0.226244
| 0.027149
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8245943398c1d7e94b97859c930b2d008a742573
| 37,909
|
py
|
Python
|
instances/passenger_demand/pas-20210421-2109-int14000000000000001e/9.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int14000000000000001e/9.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int14000000000000001e/9.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
"""
PASSENGERS
"""
numPassengers = 3290
# Passengers arriving per time period. One row per period (trailing "# n"
# comment is the period index); 12 integer columns per row — presumably one
# per station/direction pair, with columns 5 and 11 always zero (terminal
# stations where no one boards) — TODO confirm against the instance generator.
# The final row (# 59) is all zeros: no arrivals in the last period.
passenger_arriving = (
    (2, 5, 7, 5, 3, 0, 6, 10, 6, 8, 1, 0),  # 0
    (2, 8, 4, 3, 2, 0, 8, 6, 5, 5, 4, 0),  # 1
    (5, 10, 6, 4, 3, 0, 2, 10, 7, 4, 2, 0),  # 2
    (2, 7, 3, 1, 2, 0, 6, 7, 3, 5, 3, 0),  # 3
    (5, 6, 4, 5, 1, 0, 5, 7, 5, 6, 0, 0),  # 4
    (3, 6, 6, 6, 1, 0, 11, 4, 7, 6, 1, 0),  # 5
    (9, 11, 2, 3, 0, 0, 5, 10, 10, 7, 3, 0),  # 6
    (8, 11, 9, 3, 1, 0, 5, 8, 4, 5, 0, 0),  # 7
    (2, 9, 8, 5, 4, 0, 2, 7, 3, 3, 2, 0),  # 8
    (0, 5, 14, 6, 1, 0, 6, 10, 9, 4, 4, 0),  # 9
    (4, 8, 9, 2, 2, 0, 8, 8, 7, 7, 3, 0),  # 10
    (4, 11, 6, 6, 2, 0, 6, 3, 8, 2, 2, 0),  # 11
    (5, 7, 9, 3, 0, 0, 6, 5, 2, 6, 1, 0),  # 12
    (8, 5, 6, 5, 0, 0, 7, 18, 6, 5, 3, 0),  # 13
    (3, 10, 11, 5, 1, 0, 13, 5, 1, 1, 2, 0),  # 14
    (5, 9, 10, 4, 5, 0, 11, 7, 6, 8, 1, 0),  # 15
    (5, 9, 4, 5, 3, 0, 4, 8, 5, 4, 1, 0),  # 16
    (4, 15, 7, 4, 3, 0, 8, 6, 6, 9, 4, 0),  # 17
    (6, 17, 5, 5, 3, 0, 7, 12, 5, 5, 2, 0),  # 18
    (4, 10, 7, 5, 3, 0, 8, 9, 8, 5, 3, 0),  # 19
    (7, 4, 10, 2, 2, 0, 9, 9, 7, 2, 3, 0),  # 20
    (8, 2, 8, 6, 2, 0, 10, 7, 7, 7, 1, 0),  # 21
    (1, 11, 8, 4, 2, 0, 8, 13, 10, 7, 0, 0),  # 22
    (2, 10, 6, 2, 2, 0, 8, 9, 5, 5, 1, 0),  # 23
    (4, 8, 4, 6, 5, 0, 7, 6, 7, 7, 0, 0),  # 24
    (2, 5, 10, 1, 2, 0, 9, 14, 6, 5, 1, 0),  # 25
    (5, 17, 8, 1, 2, 0, 3, 12, 10, 2, 2, 0),  # 26
    (3, 12, 5, 8, 1, 0, 5, 11, 5, 2, 1, 0),  # 27
    (2, 7, 6, 6, 1, 0, 4, 6, 5, 3, 1, 0),  # 28
    (5, 10, 1, 4, 1, 0, 5, 8, 9, 5, 3, 0),  # 29
    (4, 7, 9, 5, 3, 0, 10, 15, 8, 5, 3, 0),  # 30
    (5, 9, 7, 4, 3, 0, 7, 11, 6, 5, 2, 0),  # 31
    (5, 9, 13, 4, 3, 0, 8, 12, 4, 3, 6, 0),  # 32
    (3, 8, 12, 4, 0, 0, 3, 11, 6, 9, 3, 0),  # 33
    (4, 5, 10, 7, 4, 0, 5, 8, 6, 3, 4, 0),  # 34
    (2, 14, 10, 4, 2, 0, 4, 3, 8, 5, 1, 0),  # 35
    (3, 8, 4, 10, 3, 0, 8, 13, 7, 9, 3, 0),  # 36
    (3, 3, 8, 2, 1, 0, 6, 10, 13, 4, 2, 0),  # 37
    (1, 13, 8, 5, 3, 0, 8, 10, 6, 5, 3, 0),  # 38
    (11, 10, 8, 3, 2, 0, 7, 10, 6, 5, 0, 0),  # 39
    (3, 11, 4, 7, 2, 0, 4, 6, 5, 4, 2, 0),  # 40
    (5, 8, 6, 4, 2, 0, 3, 15, 4, 10, 3, 0),  # 41
    (3, 5, 10, 1, 4, 0, 6, 10, 8, 6, 1, 0),  # 42
    (7, 4, 5, 1, 3, 0, 4, 6, 1, 2, 2, 0),  # 43
    (5, 9, 12, 3, 5, 0, 3, 11, 5, 5, 2, 0),  # 44
    (4, 8, 6, 1, 0, 0, 8, 6, 5, 5, 3, 0),  # 45
    (3, 8, 7, 6, 3, 0, 11, 9, 6, 4, 3, 0),  # 46
    (7, 7, 4, 9, 2, 0, 5, 8, 3, 10, 1, 0),  # 47
    (2, 16, 9, 4, 0, 0, 6, 10, 6, 5, 2, 0),  # 48
    (6, 9, 7, 3, 1, 0, 11, 12, 6, 9, 7, 0),  # 49
    (5, 8, 9, 3, 5, 0, 6, 11, 3, 4, 1, 0),  # 50
    (4, 10, 6, 5, 4, 0, 8, 7, 9, 6, 2, 0),  # 51
    (8, 10, 7, 0, 2, 0, 2, 13, 9, 5, 3, 0),  # 52
    (6, 15, 6, 4, 2, 0, 1, 7, 3, 5, 2, 0),  # 53
    (5, 10, 8, 4, 1, 0, 6, 18, 11, 4, 1, 0),  # 54
    (6, 5, 11, 4, 4, 0, 2, 9, 3, 5, 2, 0),  # 55
    (3, 13, 8, 2, 2, 0, 3, 7, 11, 7, 1, 0),  # 56
    (5, 9, 6, 3, 2, 0, 6, 10, 6, 6, 4, 0),  # 57
    (6, 8, 8, 2, 0, 0, 5, 10, 6, 3, 2, 0),  # 58
    (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),  # 59
)
station_arriving_intensity = (
(3.7095121817383676, 9.515044981060607, 11.19193043059126, 8.87078804347826, 10.000240384615385, 6.659510869565219), # 0
(3.7443308140669203, 9.620858238197952, 11.252381752534994, 8.920190141908213, 10.075193108974359, 6.657240994867151), # 1
(3.7787518681104277, 9.725101964085297, 11.31139817195087, 8.968504830917876, 10.148564102564103, 6.654901690821256), # 2
(3.8127461259877085, 9.827663671875001, 11.368936576156813, 9.01569089673913, 10.22028605769231, 6.652493274456523), # 3
(3.8462843698175795, 9.928430874719417, 11.424953852470724, 9.061707125603865, 10.290291666666668, 6.6500160628019325), # 4
(3.879337381718857, 10.027291085770905, 11.479406888210512, 9.106512303743962, 10.358513621794872, 6.647470372886473), # 5
(3.9118759438103607, 10.12413181818182, 11.53225257069409, 9.150065217391306, 10.424884615384617, 6.644856521739131), # 6
(3.943870838210907, 10.218840585104518, 11.58344778723936, 9.19232465277778, 10.489337339743592, 6.64217482638889), # 7
(3.975292847039314, 10.311304899691358, 11.632949425164242, 9.233249396135266, 10.551804487179488, 6.639425603864735), # 8
(4.006112752414399, 10.401412275094698, 11.680714371786634, 9.272798233695653, 10.61221875, 6.636609171195653), # 9
(4.03630133645498, 10.489050224466892, 11.72669951442445, 9.310929951690824, 10.670512820512823, 6.633725845410628), # 10
(4.065829381279876, 10.5741062609603, 11.7708617403956, 9.347603336352659, 10.726619391025642, 6.630775943538648), # 11
(4.094667669007903, 10.656467897727273, 11.813157937017996, 9.382777173913043, 10.780471153846154, 6.627759782608695), # 12
(4.122786981757876, 10.736022647920176, 11.85354499160954, 9.416410250603866, 10.832000801282053, 6.624677679649759), # 13
(4.15015810164862, 10.81265802469136, 11.891979791488144, 9.448461352657004, 10.881141025641025, 6.621529951690821), # 14
(4.1767518107989465, 10.886261541193182, 11.928419223971721, 9.478889266304348, 10.92782451923077, 6.618316915760871), # 15
(4.202538891327675, 10.956720710578002, 11.96282017637818, 9.507652777777778, 10.971983974358976, 6.61503888888889), # 16
(4.227490125353625, 11.023923045998176, 11.995139536025421, 9.53471067330918, 11.013552083333336, 6.611696188103866), # 17
(4.25157629499561, 11.087756060606061, 12.025334190231364, 9.560021739130436, 11.052461538461543, 6.608289130434783), # 18
(4.274768182372451, 11.148107267554012, 12.053361026313912, 9.58354476147343, 11.088645032051284, 6.604818032910629), # 19
(4.297036569602966, 11.204864179994388, 12.079176931590974, 9.60523852657005, 11.122035256410259, 6.601283212560387), # 20
(4.318352238805971, 11.257914311079544, 12.102738793380466, 9.625061820652174, 11.152564903846153, 6.597684986413044), # 21
(4.338685972100283, 11.307145173961842, 12.124003499000287, 9.642973429951692, 11.180166666666667, 6.5940236714975855), # 22
(4.358008551604722, 11.352444281793632, 12.142927935768354, 9.658932140700484, 11.204773237179488, 6.590299584842997), # 23
(4.3762907594381035, 11.393699147727272, 12.159468991002571, 9.672896739130437, 11.226317307692307, 6.586513043478261), # 24
(4.393503377719247, 11.430797284915124, 12.173583552020853, 9.684826011473431, 11.244731570512819, 6.582664364432368), # 25
(4.409617188566969, 11.46362620650954, 12.185228506141103, 9.694678743961353, 11.259948717948719, 6.5787538647343), # 26
(4.424602974100088, 11.492073425662877, 12.194360740681233, 9.702413722826089, 11.271901442307694, 6.574781861413045), # 27
(4.438431516437421, 11.516026455527497, 12.200937142959157, 9.707989734299519, 11.280522435897437, 6.570748671497586), # 28
(4.4510735976977855, 11.535372809255753, 12.204914600292774, 9.711365564613528, 11.285744391025641, 6.566654612016909), # 29
(4.4625, 11.55, 12.20625, 9.7125, 11.287500000000001, 6.562500000000001), # 30
(4.47319183983376, 11.56215031960227, 12.205248928140096, 9.712295118464054, 11.286861125886526, 6.556726763701484), # 31
(4.4836528452685425, 11.574140056818184, 12.202274033816424, 9.711684477124184, 11.28495815602837, 6.547834661835751), # 32
(4.493887715792838, 11.585967720170455, 12.197367798913046, 9.710674080882354, 11.281811569148937, 6.535910757121439), # 33
(4.503901150895141, 11.597631818181819, 12.19057270531401, 9.709269934640524, 11.277441843971632, 6.521042112277196), # 34
(4.513697850063939, 11.609130859374998, 12.181931234903383, 9.707478043300654, 11.27186945921986, 6.503315790021656), # 35
(4.523282512787724, 11.62046335227273, 12.171485869565219, 9.705304411764708, 11.265114893617023, 6.482818853073463), # 36
(4.532659838554988, 11.631627805397729, 12.159279091183576, 9.70275504493464, 11.257198625886524, 6.4596383641512585), # 37
(4.5418345268542195, 11.642622727272729, 12.145353381642513, 9.699835947712419, 11.248141134751775, 6.433861385973679), # 38
(4.5508112771739135, 11.653446626420456, 12.129751222826087, 9.696553125000001, 11.23796289893617, 6.40557498125937), # 39
(4.559594789002558, 11.664098011363638, 12.11251509661836, 9.692912581699348, 11.22668439716312, 6.37486621272697), # 40
(4.568189761828645, 11.674575390625, 12.093687484903382, 9.68892032271242, 11.214326108156028, 6.34182214309512), # 41
(4.576600895140665, 11.684877272727276, 12.07331086956522, 9.684582352941177, 11.2009085106383, 6.3065298350824595), # 42
(4.584832888427111, 11.69500216619318, 12.051427732487923, 9.679904677287583, 11.186452083333334, 6.26907635140763), # 43
(4.592890441176471, 11.704948579545455, 12.028080555555556, 9.674893300653595, 11.17097730496454, 6.229548754789272), # 44
(4.600778252877237, 11.714715021306818, 12.003311820652177, 9.669554227941177, 11.15450465425532, 6.188034107946028), # 45
(4.6085010230179035, 11.724300000000003, 11.97716400966184, 9.663893464052288, 11.137054609929079, 6.144619473596536), # 46
(4.616063451086957, 11.733702024147728, 11.9496796044686, 9.65791701388889, 11.118647650709221, 6.099391914459438), # 47
(4.623470236572891, 11.742919602272728, 11.920901086956523, 9.651630882352942, 11.099304255319149, 6.052438493253375), # 48
(4.630726078964194, 11.751951242897727, 11.890870939009663, 9.645041074346407, 11.079044902482272, 6.003846272696985), # 49
(4.6378356777493615, 11.760795454545454, 11.85963164251208, 9.638153594771243, 11.057890070921987, 5.953702315508913), # 50
(4.6448037324168805, 11.769450745738636, 11.827225679347826, 9.630974448529413, 11.035860239361703, 5.902093684407797), # 51
(4.651634942455243, 11.777915625, 11.793695531400965, 9.623509640522876, 11.012975886524824, 5.849107442112278), # 52
(4.658334007352941, 11.786188600852274, 11.759083680555555, 9.615765175653596, 10.989257491134753, 5.794830651340996), # 53
(4.6649056265984665, 11.79426818181818, 11.723432608695653, 9.60774705882353, 10.964725531914894, 5.739350374812594), # 54
(4.671354499680307, 11.802152876420456, 11.686784797705313, 9.599461294934642, 10.939400487588653, 5.682753675245711), # 55
(4.677685326086957, 11.809841193181818, 11.649182729468599, 9.59091388888889, 10.913302836879433, 5.625127615358988), # 56
(4.683902805306906, 11.817331640625003, 11.610668885869565, 9.582110845588236, 10.886453058510638, 5.566559257871065), # 57
(4.690011636828645, 11.824622727272727, 11.57128574879227, 9.573058169934642, 10.858871631205675, 5.507135665500583), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_arriving_acc = (
(2, 5, 7, 5, 3, 0, 6, 10, 6, 8, 1, 0), # 0
(4, 13, 11, 8, 5, 0, 14, 16, 11, 13, 5, 0), # 1
(9, 23, 17, 12, 8, 0, 16, 26, 18, 17, 7, 0), # 2
(11, 30, 20, 13, 10, 0, 22, 33, 21, 22, 10, 0), # 3
(16, 36, 24, 18, 11, 0, 27, 40, 26, 28, 10, 0), # 4
(19, 42, 30, 24, 12, 0, 38, 44, 33, 34, 11, 0), # 5
(28, 53, 32, 27, 12, 0, 43, 54, 43, 41, 14, 0), # 6
(36, 64, 41, 30, 13, 0, 48, 62, 47, 46, 14, 0), # 7
(38, 73, 49, 35, 17, 0, 50, 69, 50, 49, 16, 0), # 8
(38, 78, 63, 41, 18, 0, 56, 79, 59, 53, 20, 0), # 9
(42, 86, 72, 43, 20, 0, 64, 87, 66, 60, 23, 0), # 10
(46, 97, 78, 49, 22, 0, 70, 90, 74, 62, 25, 0), # 11
(51, 104, 87, 52, 22, 0, 76, 95, 76, 68, 26, 0), # 12
(59, 109, 93, 57, 22, 0, 83, 113, 82, 73, 29, 0), # 13
(62, 119, 104, 62, 23, 0, 96, 118, 83, 74, 31, 0), # 14
(67, 128, 114, 66, 28, 0, 107, 125, 89, 82, 32, 0), # 15
(72, 137, 118, 71, 31, 0, 111, 133, 94, 86, 33, 0), # 16
(76, 152, 125, 75, 34, 0, 119, 139, 100, 95, 37, 0), # 17
(82, 169, 130, 80, 37, 0, 126, 151, 105, 100, 39, 0), # 18
(86, 179, 137, 85, 40, 0, 134, 160, 113, 105, 42, 0), # 19
(93, 183, 147, 87, 42, 0, 143, 169, 120, 107, 45, 0), # 20
(101, 185, 155, 93, 44, 0, 153, 176, 127, 114, 46, 0), # 21
(102, 196, 163, 97, 46, 0, 161, 189, 137, 121, 46, 0), # 22
(104, 206, 169, 99, 48, 0, 169, 198, 142, 126, 47, 0), # 23
(108, 214, 173, 105, 53, 0, 176, 204, 149, 133, 47, 0), # 24
(110, 219, 183, 106, 55, 0, 185, 218, 155, 138, 48, 0), # 25
(115, 236, 191, 107, 57, 0, 188, 230, 165, 140, 50, 0), # 26
(118, 248, 196, 115, 58, 0, 193, 241, 170, 142, 51, 0), # 27
(120, 255, 202, 121, 59, 0, 197, 247, 175, 145, 52, 0), # 28
(125, 265, 203, 125, 60, 0, 202, 255, 184, 150, 55, 0), # 29
(129, 272, 212, 130, 63, 0, 212, 270, 192, 155, 58, 0), # 30
(134, 281, 219, 134, 66, 0, 219, 281, 198, 160, 60, 0), # 31
(139, 290, 232, 138, 69, 0, 227, 293, 202, 163, 66, 0), # 32
(142, 298, 244, 142, 69, 0, 230, 304, 208, 172, 69, 0), # 33
(146, 303, 254, 149, 73, 0, 235, 312, 214, 175, 73, 0), # 34
(148, 317, 264, 153, 75, 0, 239, 315, 222, 180, 74, 0), # 35
(151, 325, 268, 163, 78, 0, 247, 328, 229, 189, 77, 0), # 36
(154, 328, 276, 165, 79, 0, 253, 338, 242, 193, 79, 0), # 37
(155, 341, 284, 170, 82, 0, 261, 348, 248, 198, 82, 0), # 38
(166, 351, 292, 173, 84, 0, 268, 358, 254, 203, 82, 0), # 39
(169, 362, 296, 180, 86, 0, 272, 364, 259, 207, 84, 0), # 40
(174, 370, 302, 184, 88, 0, 275, 379, 263, 217, 87, 0), # 41
(177, 375, 312, 185, 92, 0, 281, 389, 271, 223, 88, 0), # 42
(184, 379, 317, 186, 95, 0, 285, 395, 272, 225, 90, 0), # 43
(189, 388, 329, 189, 100, 0, 288, 406, 277, 230, 92, 0), # 44
(193, 396, 335, 190, 100, 0, 296, 412, 282, 235, 95, 0), # 45
(196, 404, 342, 196, 103, 0, 307, 421, 288, 239, 98, 0), # 46
(203, 411, 346, 205, 105, 0, 312, 429, 291, 249, 99, 0), # 47
(205, 427, 355, 209, 105, 0, 318, 439, 297, 254, 101, 0), # 48
(211, 436, 362, 212, 106, 0, 329, 451, 303, 263, 108, 0), # 49
(216, 444, 371, 215, 111, 0, 335, 462, 306, 267, 109, 0), # 50
(220, 454, 377, 220, 115, 0, 343, 469, 315, 273, 111, 0), # 51
(228, 464, 384, 220, 117, 0, 345, 482, 324, 278, 114, 0), # 52
(234, 479, 390, 224, 119, 0, 346, 489, 327, 283, 116, 0), # 53
(239, 489, 398, 228, 120, 0, 352, 507, 338, 287, 117, 0), # 54
(245, 494, 409, 232, 124, 0, 354, 516, 341, 292, 119, 0), # 55
(248, 507, 417, 234, 126, 0, 357, 523, 352, 299, 120, 0), # 56
(253, 516, 423, 237, 128, 0, 363, 533, 358, 305, 124, 0), # 57
(259, 524, 431, 239, 128, 0, 368, 543, 364, 308, 126, 0), # 58
(259, 524, 431, 239, 128, 0, 368, 543, 364, 308, 126, 0), # 59
)
passenger_arriving_rate = (
(3.7095121817383676, 7.612035984848484, 6.715158258354756, 3.5483152173913037, 2.000048076923077, 0.0, 6.659510869565219, 8.000192307692307, 5.322472826086956, 4.476772172236504, 1.903008996212121, 0.0), # 0
(3.7443308140669203, 7.696686590558361, 6.751429051520996, 3.5680760567632848, 2.0150386217948717, 0.0, 6.657240994867151, 8.060154487179487, 5.352114085144928, 4.500952701013997, 1.9241716476395903, 0.0), # 1
(3.7787518681104277, 7.780081571268237, 6.786838903170522, 3.58740193236715, 2.0297128205128203, 0.0, 6.654901690821256, 8.118851282051281, 5.381102898550726, 4.524559268780347, 1.9450203928170593, 0.0), # 2
(3.8127461259877085, 7.8621309375, 6.821361945694087, 3.6062763586956517, 2.044057211538462, 0.0, 6.652493274456523, 8.176228846153847, 5.409414538043478, 4.547574630462725, 1.965532734375, 0.0), # 3
(3.8462843698175795, 7.942744699775533, 6.854972311482434, 3.624682850241546, 2.0580583333333333, 0.0, 6.6500160628019325, 8.232233333333333, 5.437024275362319, 4.569981540988289, 1.9856861749438832, 0.0), # 4
(3.879337381718857, 8.021832868616723, 6.887644132926307, 3.6426049214975844, 2.0717027243589743, 0.0, 6.647470372886473, 8.286810897435897, 5.463907382246377, 4.591762755284204, 2.005458217154181, 0.0), # 5
(3.9118759438103607, 8.099305454545455, 6.919351542416455, 3.660026086956522, 2.084976923076923, 0.0, 6.644856521739131, 8.339907692307692, 5.490039130434783, 4.612901028277636, 2.0248263636363637, 0.0), # 6
(3.943870838210907, 8.175072468083613, 6.950068672343615, 3.6769298611111116, 2.0978674679487184, 0.0, 6.64217482638889, 8.391469871794873, 5.515394791666668, 4.633379114895743, 2.043768117020903, 0.0), # 7
(3.975292847039314, 8.249043919753085, 6.979769655098544, 3.693299758454106, 2.1103608974358976, 0.0, 6.639425603864735, 8.44144358974359, 5.5399496376811594, 4.653179770065696, 2.062260979938271, 0.0), # 8
(4.006112752414399, 8.321129820075758, 7.00842862307198, 3.709119293478261, 2.12244375, 0.0, 6.636609171195653, 8.489775, 5.563678940217391, 4.672285748714653, 2.0802824550189394, 0.0), # 9
(4.03630133645498, 8.391240179573513, 7.03601970865467, 3.724371980676329, 2.134102564102564, 0.0, 6.633725845410628, 8.536410256410257, 5.586557971014494, 4.690679805769779, 2.0978100448933783, 0.0), # 10
(4.065829381279876, 8.459285008768239, 7.06251704423736, 3.739041334541063, 2.145323878205128, 0.0, 6.630775943538648, 8.581295512820512, 5.608562001811595, 4.70834469615824, 2.1148212521920597, 0.0), # 11
(4.094667669007903, 8.525174318181818, 7.087894762210797, 3.7531108695652167, 2.156094230769231, 0.0, 6.627759782608695, 8.624376923076923, 5.6296663043478254, 4.725263174807198, 2.1312935795454546, 0.0), # 12
(4.122786981757876, 8.58881811833614, 7.112126994965724, 3.766564100241546, 2.1664001602564102, 0.0, 6.624677679649759, 8.665600641025641, 5.649846150362319, 4.741417996643816, 2.147204529584035, 0.0), # 13
(4.15015810164862, 8.650126419753088, 7.135187874892886, 3.779384541062801, 2.1762282051282047, 0.0, 6.621529951690821, 8.704912820512819, 5.669076811594202, 4.756791916595257, 2.162531604938272, 0.0), # 14
(4.1767518107989465, 8.709009232954545, 7.157051534383032, 3.7915557065217387, 2.1855649038461538, 0.0, 6.618316915760871, 8.742259615384615, 5.6873335597826085, 4.771367689588688, 2.177252308238636, 0.0), # 15
(4.202538891327675, 8.7653765684624, 7.177692105826908, 3.803061111111111, 2.194396794871795, 0.0, 6.61503888888889, 8.77758717948718, 5.7045916666666665, 4.785128070551272, 2.1913441421156, 0.0), # 16
(4.227490125353625, 8.81913843679854, 7.197083721615253, 3.8138842693236716, 2.202710416666667, 0.0, 6.611696188103866, 8.810841666666668, 5.720826403985508, 4.798055814410168, 2.204784609199635, 0.0), # 17
(4.25157629499561, 8.870204848484848, 7.215200514138818, 3.824008695652174, 2.2104923076923084, 0.0, 6.608289130434783, 8.841969230769234, 5.736013043478262, 4.810133676092545, 2.217551212121212, 0.0), # 18
(4.274768182372451, 8.918485814043208, 7.232016615788346, 3.8334179045893717, 2.2177290064102566, 0.0, 6.604818032910629, 8.870916025641026, 5.750126856884058, 4.8213444105255645, 2.229621453510802, 0.0), # 19
(4.297036569602966, 8.96389134399551, 7.247506158954584, 3.8420954106280196, 2.2244070512820517, 0.0, 6.601283212560387, 8.897628205128207, 5.76314311594203, 4.831670772636389, 2.2409728359988774, 0.0), # 20
(4.318352238805971, 9.006331448863634, 7.261643276028279, 3.8500247282608693, 2.2305129807692303, 0.0, 6.597684986413044, 8.922051923076921, 5.775037092391305, 4.841095517352186, 2.2515828622159084, 0.0), # 21
(4.338685972100283, 9.045716139169473, 7.274402099400172, 3.8571893719806765, 2.2360333333333333, 0.0, 6.5940236714975855, 8.944133333333333, 5.785784057971015, 4.849601399600115, 2.2614290347923682, 0.0), # 22
(4.358008551604722, 9.081955425434906, 7.285756761461012, 3.8635728562801934, 2.2409546474358972, 0.0, 6.590299584842997, 8.963818589743589, 5.79535928442029, 4.857171174307341, 2.2704888563587264, 0.0), # 23
(4.3762907594381035, 9.114959318181818, 7.295681394601543, 3.869158695652174, 2.2452634615384612, 0.0, 6.586513043478261, 8.981053846153845, 5.803738043478262, 4.863787596401028, 2.2787398295454544, 0.0), # 24
(4.393503377719247, 9.1446378279321, 7.304150131212511, 3.8739304045893723, 2.2489463141025636, 0.0, 6.582664364432368, 8.995785256410255, 5.810895606884059, 4.869433420808341, 2.286159456983025, 0.0), # 25
(4.409617188566969, 9.17090096520763, 7.311137103684661, 3.8778714975845405, 2.2519897435897436, 0.0, 6.5787538647343, 9.007958974358974, 5.816807246376811, 4.874091402456441, 2.2927252413019077, 0.0), # 26
(4.424602974100088, 9.193658740530301, 7.31661644440874, 3.880965489130435, 2.2543802884615385, 0.0, 6.574781861413045, 9.017521153846154, 5.821448233695653, 4.877744296272493, 2.2984146851325753, 0.0), # 27
(4.438431516437421, 9.212821164421996, 7.320562285775494, 3.8831958937198072, 2.256104487179487, 0.0, 6.570748671497586, 9.024417948717948, 5.824793840579711, 4.8803748571836625, 2.303205291105499, 0.0), # 28
(4.4510735976977855, 9.228298247404602, 7.322948760175664, 3.884546225845411, 2.257148878205128, 0.0, 6.566654612016909, 9.028595512820512, 5.826819338768117, 4.881965840117109, 2.3070745618511506, 0.0), # 29
(4.4625, 9.24, 7.32375, 3.885, 2.2575000000000003, 0.0, 6.562500000000001, 9.030000000000001, 5.8275, 4.8825, 2.31, 0.0), # 30
(4.47319183983376, 9.249720255681815, 7.323149356884057, 3.884918047385621, 2.257372225177305, 0.0, 6.556726763701484, 9.02948890070922, 5.827377071078432, 4.882099571256038, 2.312430063920454, 0.0), # 31
(4.4836528452685425, 9.259312045454546, 7.3213644202898545, 3.884673790849673, 2.2569916312056737, 0.0, 6.547834661835751, 9.027966524822695, 5.82701068627451, 4.880909613526569, 2.3148280113636366, 0.0), # 32
(4.493887715792838, 9.268774176136363, 7.3184206793478275, 3.8842696323529413, 2.2563623138297872, 0.0, 6.535910757121439, 9.025449255319149, 5.826404448529412, 4.878947119565218, 2.3171935440340907, 0.0), # 33
(4.503901150895141, 9.278105454545454, 7.314343623188405, 3.8837079738562093, 2.2554883687943263, 0.0, 6.521042112277196, 9.021953475177305, 5.825561960784314, 4.876229082125604, 2.3195263636363634, 0.0), # 34
(4.513697850063939, 9.287304687499997, 7.3091587409420296, 3.882991217320261, 2.2543738918439717, 0.0, 6.503315790021656, 9.017495567375887, 5.824486825980392, 4.872772493961353, 2.3218261718749993, 0.0), # 35
(4.523282512787724, 9.296370681818182, 7.302891521739131, 3.8821217647058828, 2.253022978723404, 0.0, 6.482818853073463, 9.012091914893617, 5.823182647058824, 4.868594347826087, 2.3240926704545455, 0.0), # 36
(4.532659838554988, 9.305302244318183, 7.295567454710145, 3.881102017973856, 2.2514397251773044, 0.0, 6.4596383641512585, 9.005758900709218, 5.821653026960784, 4.86371163647343, 2.3263255610795457, 0.0), # 37
(4.5418345268542195, 9.314098181818181, 7.287212028985508, 3.8799343790849674, 2.249628226950355, 0.0, 6.433861385973679, 8.99851290780142, 5.819901568627452, 4.858141352657005, 2.3285245454545453, 0.0), # 38
(4.5508112771739135, 9.322757301136363, 7.277850733695652, 3.87862125, 2.247592579787234, 0.0, 6.40557498125937, 8.990370319148935, 5.817931875, 4.8519004891304345, 2.330689325284091, 0.0), # 39
(4.559594789002558, 9.33127840909091, 7.267509057971015, 3.8771650326797387, 2.245336879432624, 0.0, 6.37486621272697, 8.981347517730496, 5.815747549019608, 4.845006038647344, 2.3328196022727274, 0.0), # 40
(4.568189761828645, 9.3396603125, 7.256212490942029, 3.8755681290849675, 2.2428652216312055, 0.0, 6.34182214309512, 8.971460886524822, 5.813352193627452, 4.837474993961353, 2.334915078125, 0.0), # 41
(4.576600895140665, 9.34790181818182, 7.2439865217391315, 3.8738329411764707, 2.2401817021276598, 0.0, 6.3065298350824595, 8.960726808510639, 5.810749411764706, 4.829324347826088, 2.336975454545455, 0.0), # 42
(4.584832888427111, 9.356001732954544, 7.230856639492753, 3.8719618709150327, 2.2372904166666667, 0.0, 6.26907635140763, 8.949161666666667, 5.80794280637255, 4.820571092995169, 2.339000433238636, 0.0), # 43
(4.592890441176471, 9.363958863636363, 7.216848333333333, 3.8699573202614377, 2.2341954609929076, 0.0, 6.229548754789272, 8.93678184397163, 5.804935980392157, 4.811232222222222, 2.3409897159090907, 0.0), # 44
(4.600778252877237, 9.371772017045453, 7.201987092391306, 3.8678216911764705, 2.230900930851064, 0.0, 6.188034107946028, 8.923603723404256, 5.801732536764706, 4.80132472826087, 2.3429430042613633, 0.0), # 45
(4.6085010230179035, 9.379440000000002, 7.186298405797103, 3.8655573856209147, 2.2274109219858156, 0.0, 6.144619473596536, 8.909643687943262, 5.798336078431372, 4.790865603864735, 2.3448600000000006, 0.0), # 46
(4.616063451086957, 9.386961619318182, 7.16980776268116, 3.8631668055555552, 2.223729530141844, 0.0, 6.099391914459438, 8.894918120567375, 5.794750208333333, 4.77987184178744, 2.3467404048295455, 0.0), # 47
(4.623470236572891, 9.394335681818182, 7.152540652173913, 3.8606523529411763, 2.21986085106383, 0.0, 6.052438493253375, 8.87944340425532, 5.790978529411765, 4.7683604347826085, 2.3485839204545456, 0.0), # 48
(4.630726078964194, 9.401560994318181, 7.134522563405797, 3.8580164297385626, 2.2158089804964543, 0.0, 6.003846272696985, 8.863235921985817, 5.787024644607844, 4.7563483756038645, 2.3503902485795454, 0.0), # 49
(4.6378356777493615, 9.408636363636361, 7.115778985507247, 3.8552614379084966, 2.211578014184397, 0.0, 5.953702315508913, 8.846312056737588, 5.782892156862745, 4.743852657004831, 2.3521590909090904, 0.0), # 50
(4.6448037324168805, 9.415560596590907, 7.096335407608696, 3.852389779411765, 2.2071720478723407, 0.0, 5.902093684407797, 8.828688191489363, 5.778584669117648, 4.73089027173913, 2.353890149147727, 0.0), # 51
(4.651634942455243, 9.4223325, 7.0762173188405795, 3.84940385620915, 2.2025951773049646, 0.0, 5.849107442112278, 8.810380709219858, 5.774105784313726, 4.717478212560386, 2.355583125, 0.0), # 52
(4.658334007352941, 9.428950880681818, 7.055450208333333, 3.8463060702614382, 2.1978514982269504, 0.0, 5.794830651340996, 8.791405992907801, 5.769459105392158, 4.703633472222222, 2.3572377201704544, 0.0), # 53
(4.6649056265984665, 9.435414545454544, 7.034059565217391, 3.843098823529412, 2.192945106382979, 0.0, 5.739350374812594, 8.771780425531915, 5.764648235294119, 4.689373043478261, 2.358853636363636, 0.0), # 54
(4.671354499680307, 9.441722301136364, 7.012070878623187, 3.8397845179738566, 2.1878800975177306, 0.0, 5.682753675245711, 8.751520390070922, 5.759676776960785, 4.674713919082125, 2.360430575284091, 0.0), # 55
(4.677685326086957, 9.447872954545453, 6.989509637681159, 3.8363655555555556, 2.1826605673758865, 0.0, 5.625127615358988, 8.730642269503546, 5.754548333333334, 4.65967309178744, 2.361968238636363, 0.0), # 56
(4.683902805306906, 9.453865312500001, 6.966401331521738, 3.832844338235294, 2.1772906117021273, 0.0, 5.566559257871065, 8.70916244680851, 5.749266507352941, 4.644267554347826, 2.3634663281250003, 0.0), # 57
(4.690011636828645, 9.459698181818181, 6.942771449275362, 3.8292232679738563, 2.1717743262411346, 0.0, 5.507135665500583, 8.687097304964539, 5.743834901960785, 4.628514299516908, 2.3649245454545453, 0.0), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_allighting_rate = (
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 0
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 1
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 2
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 3
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 4
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 5
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 6
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 7
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 8
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 9
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 10
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 11
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 12
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 13
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 14
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 15
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 16
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 17
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 18
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 19
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 20
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 21
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 22
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 23
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 24
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 25
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 26
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 27
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 28
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 29
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 30
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 31
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 32
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 33
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 34
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 35
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 36
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 37
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 38
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 39
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 40
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 41
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 42
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 43
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 44
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 45
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 46
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 47
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 48
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 49
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 50
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 51
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 52
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 53
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 54
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 55
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 56
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 57
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 58
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 59
)
"""
parameters for reproducibiliy. More information: https://numpy.org/doc/stable/reference/random/parallel.html
"""
#initial entropy
entropy = 258194110137029475889902652135037600173
#index for seed sequence child
child_seed_index = (
1, # 0
8, # 1
)
| 113.161194
| 212
| 0.729246
| 5,147
| 37,909
| 5.368953
| 0.227705
| 0.312658
| 0.247521
| 0.468987
| 0.328002
| 0.32764
| 0.32764
| 0.32764
| 0.32764
| 0.32764
| 0
| 0.819135
| 0.119075
| 37,909
| 334
| 213
| 113.5
| 0.008355
| 0.031945
| 0
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.015823
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
412e0eb8310cfb4c73e90015869e4f3009eb4387
| 65
|
py
|
Python
|
Reading Data/lesson-4-tsv-with-the-simpsons-episodes/tests/test_simpsons_shape.py
|
danielgarm/Data-Science-and-Machine-Learning
|
fa3e85cc42eb2e9f964ab5abb34d1c93e16d1cd9
|
[
"MIT"
] | null | null | null |
Reading Data/lesson-4-tsv-with-the-simpsons-episodes/tests/test_simpsons_shape.py
|
danielgarm/Data-Science-and-Machine-Learning
|
fa3e85cc42eb2e9f964ab5abb34d1c93e16d1cd9
|
[
"MIT"
] | 2
|
2022-01-11T21:04:51.000Z
|
2022-01-11T21:05:05.000Z
|
Reading Data/lesson-4-tsv-with-the-simpsons-episodes/tests/test_simpsons_shape.py
|
danielgarm/Data-Science-and-Machine-Learning
|
fa3e85cc42eb2e9f964ab5abb34d1c93e16d1cd9
|
[
"MIT"
] | null | null | null |
def test_simpsons_shape():
assert simpsons.shape == (597, 3)
| 21.666667
| 37
| 0.692308
| 9
| 65
| 4.777778
| 0.777778
| 0.604651
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 0.169231
| 65
| 2
| 38
| 32.5
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
41387a37d707e04863616d4e8e65c5b638d82d0a
| 26
|
py
|
Python
|
EffectLoops/__init__.py
|
jcksnvllxr80/MidiController
|
de6d3c983cd27408e88a744a0a4d3c887efa3d54
|
[
"MIT"
] | 1
|
2021-06-06T15:36:27.000Z
|
2021-06-06T15:36:27.000Z
|
EffectLoops/__init__.py
|
jcksnvllxr80/MidiController
|
de6d3c983cd27408e88a744a0a4d3c887efa3d54
|
[
"MIT"
] | 1
|
2021-06-06T15:37:42.000Z
|
2021-06-06T15:37:42.000Z
|
EffectLoops/__init__.py
|
jcksnvllxr80/MidiController
|
de6d3c983cd27408e88a744a0a4d3c887efa3d54
|
[
"MIT"
] | null | null | null |
from EffectLoops import *
| 13
| 25
| 0.807692
| 3
| 26
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 26
| 1
| 26
| 26
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.